From ebf2a72253e8bb51d59de5f7b34cd89992eaec59 Mon Sep 17 00:00:00 2001
From: yoshi-automation <yoshi-automation@google.com>
Date: Fri, 31 Jul 2020 08:55:05 -0700
Subject: [PATCH 1/6] changes without context

autosynth cannot find the source of changes triggered by earlier changes in
this repository, or by version upgrades to tools such as linters.
---
 .../cloudbuild_v1/proto/cloudbuild.proto      |   2 +-
 .../services/cloud_build/client.py            | 434 ++++++++----------
 .../services/cloud_build/transports/base.py   | 159 ++++++-
 .../services/cloud_build/transports/grpc.py   |  19 +-
 .../cloud_build/transports/grpc_asyncio.py    |  15 +-
 .../cloudbuild_v1/types/cloudbuild.py         |   2 +-
 scripts/fixup_cloudbuild_v1_keywords.py       |   1 +
 synth.metadata                                |  10 +-
 tests/unit/gapic/cloudbuild_v1/__init__.py    |   1 +
 .../gapic/cloudbuild_v1/test_cloud_build.py   | 399 +++++++++++-----
 10 files changed, 677 insertions(+), 365 deletions(-)

diff --git a/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto b/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto
index 1f06623f..4e3a0188 100644
--- a/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto
+++ b/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto
@@ -1124,7 +1124,7 @@ message BuildOptions {
   LogStreamingOption log_streaming_option = 5;
 
   // Option to specify a `WorkerPool` for the build.
-  // Format: projects/{project}/workerPools/{workerPool}
+  // Format: projects/{project}/locations/{location}/workerPools/{workerPool}
   //
   // This field is experimental.
   string worker_pool = 7;
diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py
index a89cd044..e11623af 100644
--- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py
+++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py
@@ -225,6 +225,7 @@ def __init__(
             scopes=client_options.scopes,
             api_mtls_endpoint=client_options.api_endpoint,
             client_cert_source=client_options.client_cert_source,
+            quota_project_id=client_options.quota_project_id,
         )
 
     def create_build(
@@ -296,29 +297,31 @@ def create_build(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([project_id, build]):
+        has_flattened_params = any([project_id, build])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
             )
 
-        request = cloudbuild.CreateBuildRequest(request)
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudbuild.CreateBuildRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloudbuild.CreateBuildRequest):
+            request = cloudbuild.CreateBuildRequest(request)
 
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
 
-        if project_id is not None:
-            request.project_id = project_id
-        if build is not None:
-            request.build = build
+            if project_id is not None:
+                request.project_id = project_id
+            if build is not None:
+                request.build = build
 
         # Wrap the RPC method; this adds retry and timeout information,
         # and friendly error handling.
- rpc = gapic_v1.method.wrap_method( - self._transport.create_build, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.create_build] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -399,37 +402,31 @@ def get_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.GetBuildRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetBuildRequest): + request = cloudbuild.GetBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_build, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.get_build] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -483,37 +480,31 @@ def list_builds( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, filter]): + has_flattened_params = any([project_id, filter]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.ListBuildsRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListBuildsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListBuildsRequest): + request = cloudbuild.ListBuildsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if filter is not None: - request.filter = filter + if project_id is not None: + request.project_id = project_id + if filter is not None: + request.filter = filter # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.list_builds, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_builds] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -588,29 +579,31 @@ def cancel_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.CancelBuildRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CancelBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CancelBuildRequest): + request = cloudbuild.CancelBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_build, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_build] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -713,29 +706,31 @@ def retry_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.RetryBuildRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.RetryBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.RetryBuildRequest): + request = cloudbuild.RetryBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.retry_build, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.retry_build] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -797,29 +792,31 @@ def create_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger]): + has_flattened_params = any([project_id, trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.CreateBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CreateBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CreateBuildTriggerRequest): + request = cloudbuild.CreateBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger is not None: - request.trigger = trigger + if project_id is not None: + request.project_id = project_id + if trigger is not None: + request.trigger = trigger # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.create_build_trigger, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -874,37 +871,31 @@ def get_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id]): + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.GetBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetBuildTriggerRequest): + request = cloudbuild.GetBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_build_trigger, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -953,35 +944,29 @@ def list_build_triggers( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id]): + has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.ListBuildTriggersRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListBuildTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListBuildTriggersRequest): + request = cloudbuild.ListBuildTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id + if project_id is not None: + request.project_id = project_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_build_triggers, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1033,37 +1018,31 @@ def delete_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id]): + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.DeleteBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.DeleteBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.DeleteBuildTriggerRequest): + request = cloudbuild.DeleteBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
- if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_build_trigger, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_build_trigger] # Send the request. rpc( @@ -1122,31 +1101,33 @@ def update_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id, trigger]): + has_flattened_params = any([project_id, trigger_id, trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.UpdateBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.UpdateBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.UpdateBuildTriggerRequest): + request = cloudbuild.UpdateBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if trigger is not None: - request.trigger = trigger + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if trigger is not None: + request.trigger = trigger # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.update_build_trigger, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1227,31 +1208,33 @@ def run_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id, source]): + has_flattened_params = any([project_id, trigger_id, source]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.RunBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.RunBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudbuild.RunBuildTriggerRequest): + request = cloudbuild.RunBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if source is not None: - request.source = source + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if source is not None: + request.source = source # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.run_build_trigger, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1309,15 +1292,16 @@ def create_worker_pool( """ # Create or coerce a protobuf request object. - request = cloudbuild.CreateWorkerPoolRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CreateWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CreateWorkerPoolRequest): + request = cloudbuild.CreateWorkerPoolRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.create_worker_pool, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1366,23 +1350,16 @@ def get_worker_pool( """ # Create or coerce a protobuf request object. - request = cloudbuild.GetWorkerPoolRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetWorkerPoolRequest): + request = cloudbuild.GetWorkerPoolRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_worker_pool, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1414,15 +1391,16 @@ def delete_worker_pool( """ # Create or coerce a protobuf request object. - request = cloudbuild.DeleteWorkerPoolRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.DeleteWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudbuild.DeleteWorkerPoolRequest): + request = cloudbuild.DeleteWorkerPoolRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_worker_pool, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_worker_pool] # Send the request. rpc( @@ -1469,15 +1447,16 @@ def update_worker_pool( """ # Create or coerce a protobuf request object. - request = cloudbuild.UpdateWorkerPoolRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.UpdateWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.UpdateWorkerPoolRequest): + request = cloudbuild.UpdateWorkerPoolRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.update_worker_pool, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1513,23 +1492,16 @@ def list_worker_pools( """ # Create or coerce a protobuf request object. - request = cloudbuild.ListWorkerPoolsRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListWorkerPoolsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListWorkerPoolsRequest): + request = cloudbuild.ListWorkerPoolsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_worker_pools, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] # Send the request. 
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py index ace575ab..32d9f153 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py @@ -17,9 +17,12 @@ import abc import typing +import pkg_resources from google import auth from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials # type: ignore @@ -28,6 +31,16 @@ from google.protobuf import empty_pb2 as empty # type: ignore +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-devtools-cloudbuild", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + class CloudBuildTransport(abc.ABC): """Abstract transport class for CloudBuild.""" @@ -40,6 +53,7 @@ def __init__( credentials: credentials.Credentials = None, credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -55,6 +69,8 @@ def __init__( be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -70,14 +86,153 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes + credentials_file, scopes=scopes, quota_project_id=quota_project_id ) + elif credentials is None: - credentials, _ = auth.default(scopes=scopes) + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) # Save the credentials. self._credentials = credentials + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages() + + def _prep_wrapped_messages(self): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_build: gapic_v1.method.wrap_method( + self.create_build, default_timeout=600.0, client_info=_client_info, + ), + self.get_build: gapic_v1.method.wrap_method( + self.get_build, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=_client_info, + ), + self.list_builds: gapic_v1.method.wrap_method( + self.list_builds, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=_client_info, + ), + self.cancel_build: gapic_v1.method.wrap_method( + self.cancel_build, default_timeout=600.0, client_info=_client_info, + ), + self.retry_build: gapic_v1.method.wrap_method( + self.retry_build, default_timeout=600.0, client_info=_client_info, + ), + self.create_build_trigger: gapic_v1.method.wrap_method( + self.create_build_trigger, + default_timeout=600.0, + client_info=_client_info, + ), + self.get_build_trigger: gapic_v1.method.wrap_method( + self.get_build_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=_client_info, + ), + self.list_build_triggers: gapic_v1.method.wrap_method( + self.list_build_triggers, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=_client_info, + ), + self.delete_build_trigger: gapic_v1.method.wrap_method( + self.delete_build_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=_client_info, + ), + self.update_build_trigger: gapic_v1.method.wrap_method( + self.update_build_trigger, + default_timeout=600.0, + client_info=_client_info, + ), + self.run_build_trigger: gapic_v1.method.wrap_method( + self.run_build_trigger, default_timeout=600.0, client_info=_client_info, + ), + self.create_worker_pool: gapic_v1.method.wrap_method( + self.create_worker_pool, + default_timeout=600.0, + client_info=_client_info, + ), + self.get_worker_pool: gapic_v1.method.wrap_method( + self.get_worker_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=_client_info, + ), + self.delete_worker_pool: gapic_v1.method.wrap_method( + self.delete_worker_pool, + default_timeout=600.0, + client_info=_client_info, + ), + self.update_worker_pool: gapic_v1.method.wrap_method( + self.update_worker_pool, + default_timeout=600.0, + client_info=_client_info, + ), + self.list_worker_pools: gapic_v1.method.wrap_method( + self.list_worker_pools, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=_client_info, + ), + } + @property 
def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py index a3570363..c8affe84 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py @@ -64,7 +64,8 @@ def __init__( scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None ) -> None: """Instantiate the transport. @@ -91,6 +92,8 @@ def __init__( callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -113,7 +116,9 @@ def __init__( ) if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) # Create SSL credentials with client_cert_source or application # default SSL credentials. @@ -132,18 +137,20 @@ def __init__( credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) + self._stubs = {} # type: Dict[str, Callable] + # Run the base constructor. super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) - self._stubs = {} # type: Dict[str, Callable] - @classmethod def create_channel( cls, @@ -151,6 +158,7 @@ def create_channel( credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, **kwargs ) -> grpc.Channel: """Create and return a gRPC channel object. @@ -167,6 +175,8 @@ def create_channel( scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: @@ -182,6 +192,7 @@ def create_channel( credentials=credentials, credentials_file=credentials_file, scopes=scopes, + quota_project_id=quota_project_id, **kwargs ) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py index 432dfb85..817e30a5 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -63,7 +63,8 @@ def create_channel( credentials: credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - **kwargs + quota_project_id: Optional[str] = None, + **kwargs, ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. 
Args: @@ -79,6 +80,8 @@ def create_channel( scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: @@ -90,7 +93,8 @@ def create_channel( credentials=credentials, credentials_file=credentials_file, scopes=scopes, - **kwargs + quota_project_id=quota_project_id, + **kwargs, ) def __init__( @@ -102,7 +106,8 @@ def __init__( scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, ) -> None: """Instantiate the transport. @@ -130,6 +135,8 @@ def __init__( callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -168,6 +175,7 @@ def __init__( credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) # Run the base constructor. @@ -176,6 +184,7 @@ def __init__( credentials=credentials, credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) self._stubs = {} diff --git a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index 371c86c4..b7f5dbbd 100644 --- a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -1220,7 +1220,7 @@ class BuildOptions(proto.Message): to Google Cloud Storage. worker_pool (str): Option to specify a ``WorkerPool`` for the build. Format: - projects/{project}/workerPools/{workerPool} + projects/{project}/locations/{location}/workerPools/{workerPool} This field is experimental. 
logging (~.cloudbuild.BuildOptions.LoggingMode): diff --git a/scripts/fixup_cloudbuild_v1_keywords.py b/scripts/fixup_cloudbuild_v1_keywords.py index 92e726f3..8d7e6d83 100644 --- a/scripts/fixup_cloudbuild_v1_keywords.py +++ b/scripts/fixup_cloudbuild_v1_keywords.py @@ -56,6 +56,7 @@ class cloudbuildCallTransformer(cst.CSTTransformer): 'run_build_trigger': ('project_id', 'trigger_id', 'source', ), 'update_build_trigger': ('project_id', 'trigger_id', 'trigger', ), 'update_worker_pool': ('name', 'worker_pool', ), + } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/synth.metadata b/synth.metadata index 0e3a4407..ec8cfe7f 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,7 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-cloudbuild.git", - "sha": "d75e9f590378c67c52ab052cc6122301922a0560" + "sha": "e3be0483b5b90b9b03d700f542b9154eb0e7c2dd" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "fbf9396664b766a08d92da9d4f31be019a847c39", + "internalRef": "324209019" } }, { diff --git a/tests/unit/gapic/cloudbuild_v1/__init__.py b/tests/unit/gapic/cloudbuild_v1/__init__.py index e69de29b..8b137891 100644 --- a/tests/unit/gapic/cloudbuild_v1/__init__.py +++ b/tests/unit/gapic/cloudbuild_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index fff9e16f..44666a8e 100644 --- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -52,6 +52,17 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -113,6 +124,14 @@ def test_cloud_build_client_get_transport_class(): ), ], ) +@mock.patch.object( + CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) +) +@mock.patch.object( + CloudBuildAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudBuildAsyncClient), +) def test_cloud_build_client_client_options( client_class, transport_class, transport_name ): @@ -139,64 +158,29 @@ def test_cloud_build_client_client_options( scopes=None, api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, + quota_project_id=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "never". 
- os.environ["GOOGLE_API_USE_MTLS"] = "never" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - ) + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "always". - os.environ["GOOGLE_API_USE_MTLS"] = "always" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -206,34 +190,88 @@ def test_cloud_build_client_client_options( scopes=None, api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): + # "auto", and client_cert_source is provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, ) + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has # unsupported value. 
- os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" - with pytest.raises(MutualTLSChannelError): - client = client_class() + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() - del os.environ["GOOGLE_API_USE_MTLS"] + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + ) @pytest.mark.parametrize( @@ -262,6 +300,7 @@ def test_cloud_build_client_client_options_scopes( scopes=["1", "2"], api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) @@ -291,6 +330,7 @@ def test_cloud_build_client_client_options_credentials_file( scopes=None, api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) @@ -307,17 +347,20 @@ def test_cloud_build_client_client_options_from_dict(): scopes=None, api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, + quota_project_id=None, ) -def test_create_build(transport: str = "grpc"): +def test_create_build( + transport: str = "grpc", request_type=cloudbuild.CreateBuildRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.create_build), "__call__") as call: @@ -330,12 +373,16 @@ def test_create_build(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +def test_create_build_from_dict(): + test_create_build(request_type=dict) + + @pytest.mark.asyncio async def test_create_build_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -448,14 +495,14 @@ async def test_create_build_flattened_error_async(): ) -def test_get_build(transport: str = "grpc"): +def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequest): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.get_build), "__call__") as call: @@ -478,7 +525,7 @@ def test_get_build(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetBuildRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.Build) @@ -502,6 +549,10 @@ def test_get_build(transport: str = "grpc"): assert response.tags == ["tags_value"] +def test_get_build_from_dict(): + test_get_build(request_type=dict) + + @pytest.mark.asyncio async def test_get_build_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -634,14 +685,16 @@ async def test_get_build_flattened_error_async(): ) -def test_list_builds(transport: str = "grpc"): +def test_list_builds( + transport: str = "grpc", request_type=cloudbuild.ListBuildsRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListBuildsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.list_builds), "__call__") as call: @@ -656,7 +709,7 @@ def test_list_builds(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListBuildsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildsPager) @@ -664,6 +717,10 @@ def test_list_builds(transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" +def test_list_builds_from_dict(): + test_list_builds(request_type=dict) + + @pytest.mark.asyncio async def test_list_builds_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -901,14 +958,16 @@ async def test_list_builds_async_pages(): assert page.raw_page.next_page_token == token -def test_cancel_build(transport: str = "grpc"): +def test_cancel_build( + transport: str = "grpc", request_type=cloudbuild.CancelBuildRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CancelBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: @@ -931,7 +990,7 @@ def test_cancel_build(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CancelBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.Build) @@ -955,6 +1014,10 @@ def test_cancel_build(transport: str = "grpc"): assert response.tags == ["tags_value"] +def test_cancel_build_from_dict(): + test_cancel_build(request_type=dict) + + @pytest.mark.asyncio async def test_cancel_build_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -1093,14 +1156,16 @@ async def test_cancel_build_flattened_error_async(): ) -def test_retry_build(transport: str = "grpc"): +def test_retry_build( + transport: str = "grpc", request_type=cloudbuild.RetryBuildRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = cloudbuild.RetryBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.retry_build), "__call__") as call: @@ -1113,12 +1178,16 @@ def test_retry_build(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.RetryBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +def test_retry_build_from_dict(): + test_retry_build(request_type=dict) + + @pytest.mark.asyncio async def test_retry_build_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -1231,14 +1300,16 @@ async def test_retry_build_flattened_error_async(): ) -def test_create_build_trigger(transport: str = "grpc"): +def test_create_build_trigger( + transport: str = "grpc", request_type=cloudbuild.CreateBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1262,7 +1333,7 @@ def test_create_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) @@ -1282,6 +1353,10 @@ def test_create_build_trigger(transport: str = "grpc"): assert response.included_files == ["included_files_value"] +def test_create_build_trigger_from_dict(): + test_create_build_trigger(request_type=dict) + + @pytest.mark.asyncio async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -1420,14 +1495,16 @@ async def test_create_build_trigger_flattened_error_async(): ) -def test_get_build_trigger(transport: str = "grpc"): +def test_get_build_trigger( + transport: str = "grpc", request_type=cloudbuild.GetBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1451,7 +1528,7 @@ def test_get_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetBuildTriggerRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.BuildTrigger) @@ -1471,6 +1548,10 @@ def test_get_build_trigger(transport: str = "grpc"): assert response.included_files == ["included_files_value"] +def test_get_build_trigger_from_dict(): + test_get_build_trigger(request_type=dict) + + @pytest.mark.asyncio async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -1607,14 +1688,16 @@ async def test_get_build_trigger_flattened_error_async(): ) -def test_list_build_triggers(transport: str = "grpc"): +def test_list_build_triggers( + transport: str = "grpc", request_type=cloudbuild.ListBuildTriggersRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListBuildTriggersRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1631,7 +1714,7 @@ def test_list_build_triggers(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListBuildTriggersRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildTriggersPager) @@ -1639,6 +1722,10 @@ def test_list_build_triggers(transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" +def test_list_build_triggers_from_dict(): + test_list_build_triggers(request_type=dict) + + @pytest.mark.asyncio async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -1888,14 +1975,16 @@ async def test_list_build_triggers_async_pages(): assert page.raw_page.next_page_token == token -def test_delete_build_trigger(transport: str = "grpc"): +def test_delete_build_trigger( + transport: str = "grpc", request_type=cloudbuild.DeleteBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.DeleteBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1910,12 +1999,16 @@ def test_delete_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() # Establish that the response is the type that we expect. assert response is None +def test_delete_build_trigger_from_dict(): + test_delete_build_trigger(request_type=dict) + + @pytest.mark.asyncio async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -2026,14 +2119,16 @@ async def test_delete_build_trigger_flattened_error_async(): ) -def test_update_build_trigger(transport: str = "grpc"): +def test_update_build_trigger( + transport: str = "grpc", request_type=cloudbuild.UpdateBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = cloudbuild.UpdateBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2057,7 +2152,7 @@ def test_update_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) @@ -2077,6 +2172,10 @@ def test_update_build_trigger(transport: str = "grpc"): assert response.included_files == ["included_files_value"] +def test_update_build_trigger_from_dict(): + test_update_build_trigger(request_type=dict) + + @pytest.mark.asyncio async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -2223,14 +2322,16 @@ async def test_update_build_trigger_flattened_error_async(): ) -def test_run_build_trigger(transport: str = "grpc"): +def test_run_build_trigger( + transport: str = "grpc", request_type=cloudbuild.RunBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.RunBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2245,12 +2346,16 @@ def test_run_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.RunBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +def test_run_build_trigger_from_dict(): + test_run_build_trigger(request_type=dict) + + @pytest.mark.asyncio async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -2375,14 +2480,16 @@ async def test_run_build_trigger_flattened_error_async(): ) -def test_create_worker_pool(transport: str = "grpc"): +def test_create_worker_pool( + transport: str = "grpc", request_type=cloudbuild.CreateWorkerPoolRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2404,7 +2511,7 @@ def test_create_worker_pool(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateWorkerPoolRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.WorkerPool) @@ -2422,6 +2529,10 @@ def test_create_worker_pool(transport: str = "grpc"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +def test_create_worker_pool_from_dict(): + test_create_worker_pool(request_type=dict) + + @pytest.mark.asyncio async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -2472,14 +2583,16 @@ async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_get_worker_pool(transport: str = "grpc"): +def test_get_worker_pool( + transport: str = "grpc", request_type=cloudbuild.GetWorkerPoolRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client._transport.get_worker_pool), "__call__") as call: @@ -2499,7 +2612,7 @@ def test_get_worker_pool(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetWorkerPoolRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) @@ -2517,6 +2630,10 @@ def test_get_worker_pool(transport: str = "grpc"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +def test_get_worker_pool_from_dict(): + test_get_worker_pool(request_type=dict) + + @pytest.mark.asyncio async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -2567,14 +2684,16 @@ async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_delete_worker_pool(transport: str = "grpc"): +def test_delete_worker_pool( + transport: str = "grpc", request_type=cloudbuild.DeleteWorkerPoolRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.DeleteWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2589,12 +2708,16 @@ def test_delete_worker_pool(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() # Establish that the response is the type that we expect. 
assert response is None +def test_delete_worker_pool_from_dict(): + test_delete_worker_pool(request_type=dict) + + @pytest.mark.asyncio async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -2624,14 +2747,16 @@ async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): assert response is None -def test_update_worker_pool(transport: str = "grpc"): +def test_update_worker_pool( + transport: str = "grpc", request_type=cloudbuild.UpdateWorkerPoolRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.UpdateWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2653,7 +2778,7 @@ def test_update_worker_pool(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) @@ -2671,6 +2796,10 @@ def test_update_worker_pool(transport: str = "grpc"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +def test_update_worker_pool_from_dict(): + test_update_worker_pool(request_type=dict) + + @pytest.mark.asyncio async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -2721,14 +2850,16 @@ async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_list_worker_pools(transport: str = "grpc"): +def test_list_worker_pools( + transport: str = "grpc", request_type=cloudbuild.ListWorkerPoolsRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListWorkerPoolsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2743,12 +2874,16 @@ def test_list_worker_pools(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListWorkerPoolsRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.ListWorkerPoolsResponse) +def test_list_worker_pools_from_dict(): + test_list_worker_pools(request_type=dict) + + @pytest.mark.asyncio async def test_list_worker_pools_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( @@ -2851,9 +2986,13 @@ def test_cloud_build_base_transport_error(): def test_cloud_build_base_transport(): # Instantiate the base transport. - transport = transports.CloudBuildTransport( - credentials=credentials.AnonymousCredentials(), - ) + with mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudBuildTransport( + credentials=credentials.AnonymousCredentials(), + ) # Every method on the transport should just blindly # raise NotImplementedError. 
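The test hunks above apply one refactoring uniformly: each RPC test body now takes a `request_type` parameter that defaults to the proto request class, and a small `*_from_dict` wrapper re-runs the same body with a plain `dict`, checking that the client coerces either input into the canonical proto request before it reaches the transport. A minimal, self-contained sketch of that pattern follows; `EchoRequest` and `EchoClient` are hypothetical stand-ins for illustration only, not the generated Cloud Build surface:

    from unittest import mock


    class EchoRequest:
        """Hypothetical proto-plus-style request message: constructible
        from nothing or from a dict-like mapping."""

        def __init__(self, mapping=None):
            self.payload = dict(mapping or {}).get("payload", "")

        def __eq__(self, other):
            return isinstance(other, EchoRequest) and self.payload == other.payload


    class EchoClient:
        """Hypothetical generated client: coerces dict requests into the
        canonical request type before invoking the transport."""

        def echo(self, request):
            if not isinstance(request, EchoRequest):
                request = EchoRequest(request)
            return self._rpc(request)

        def _rpc(self, request):  # patched out in the tests below
            raise NotImplementedError


    def test_echo(request_type=EchoRequest):
        client = EchoClient()
        request = request_type()
        with mock.patch.object(EchoClient, "_rpc") as rpc:
            client.echo(request)
        # Whether the caller passed a proto object or a plain dict, the
        # transport must receive the canonical request type.
        _, args, _ = rpc.mock_calls[0]
        assert args[0] == EchoRequest()


    def test_echo_from_dict():
        # Re-run the identical assertions with a dict request.
        test_echo(request_type=dict)

One parametrized body plus a one-line wrapper keeps both code paths covered without duplicating every assertion, which is why the generator emits the same shape for every RPC.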
@@ -2887,12 +3026,20 @@ def test_cloud_build_base_transport(): def test_cloud_build_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(auth, "load_credentials_from_file") as load_creds: + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) - transport = transports.CloudBuildTransport(credentials_file="credentials.json",) + transport = transports.CloudBuildTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) load_creds.assert_called_once_with( "credentials.json", scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) @@ -2902,7 +3049,8 @@ def test_cloud_build_auth_adc(): adc.return_value = (credentials.AnonymousCredentials(), None) CloudBuildClient() adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",) + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) @@ -2911,9 +3059,12 @@ def test_cloud_build_transport_auth_adc(): # ADC credentials. with mock.patch.object(auth, "default") as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - transports.CloudBuildGrpcTransport(host="squid.clam.whelk") + transports.CloudBuildGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",) + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) @@ -3001,6 +3152,7 @@ def test_cloud_build_grpc_transport_channel_mtls_with_client_cert_source( credentials_file=None, scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -3035,6 +3187,7 @@ def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_client_cert_source credentials_file=None, scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -3071,6 +3224,7 @@ def test_cloud_build_grpc_transport_channel_mtls_with_adc( credentials_file=None, scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -3107,6 +3261,7 @@ def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_adc( credentials_file=None, scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel From 39ebad27764f22edb8f803f95a5f5ac29eb93919 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 31 Jul 2020 08:55:42 -0700 Subject: [PATCH 2/6] feat(python-library): changes to docs job * feat(python-library): changes to docs job * migrate to Trampoline V2 * add docs-presubmit job * create docfx yaml files and upload them to another bucket * remove redundant envvars Source-Author: Takashi Matsuo Source-Date: Wed Jul 29 16:15:18 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: f07cb4446192952f19be3056957f56d180586055 Source-Link: 
https://github.com/googleapis/synthtool/commit/f07cb4446192952f19be3056957f56d180586055 --- docs/conf.py | 71 +- google/cloud/devtools/cloudbuild/__init__.py | 118 +- .../cloud/devtools/cloudbuild_v1/__init__.py | 88 +- .../services/cloud_build/__init__.py | 4 +- .../services/cloud_build/async_client.py | 534 +++-- .../services/cloud_build/client.py | 533 +++-- .../services/cloud_build/pagers.py | 68 +- .../cloud_build/transports/__init__.py | 10 +- .../services/cloud_build/transports/base.py | 293 +-- .../services/cloud_build/transports/grpc.py | 295 ++- .../cloud_build/transports/grpc_asyncio.py | 311 ++- .../devtools/cloudbuild_v1/types/__init__.py | 132 +- .../cloudbuild_v1/types/cloudbuild.py | 368 +-- synth.metadata | 2 +- .../gapic/cloudbuild_v1/test_cloud_build.py | 1977 +++++++++-------- 15 files changed, 2580 insertions(+), 2224 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 2d0ebebb..97a8f600 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,22 @@ # -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at # -# google-cloud-build documentation build configuration file +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# google-devtools-cloudbuild documentation build configuration file # # This file is execfile()d with the current directory set to its # containing dir. @@ -20,7 +36,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "" +__version__ = "0.1.0" # -- General configuration ------------------------------------------------ @@ -38,21 +54,23 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", - "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_options = {"members": True} +autodoc_default_flags = ["members"] autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. @@ -62,9 +80,9 @@ master_doc = "index" # General information about the project. -project = u"google-cloud-build" -copyright = u"2019, Google" -author = u"Google APIs" +project = u"google-devtools-cloudbuild" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -130,9 +148,9 @@ # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-build", + "description": "Google Devtools Client Libraries for Python", "github_user": "googleapis", - "github_repo": "python-cloudbuild", + "github_repo": "google-cloud-python", "github_banner": True, "font_family": "'Roboto', Georgia, sans", "head_font_family": "'Roboto', Georgia, serif", @@ -161,7 +179,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +# html_static_path = [] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -224,7 +242,7 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-build-doc" +htmlhelp_basename = "google-devtools-cloudbuild-doc" # -- Options for warnings ------------------------------------------------------ @@ -242,13 +260,13 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', + # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', + # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. - #'preamble': '', + # 'preamble': '', # Latex figure (float) alignment - #'figure_align': 'htbp', + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples @@ -257,8 +275,8 @@ latex_documents = [ ( master_doc, - "google-cloud-build.tex", - u"google-cloud-build Documentation", + "google-devtools-cloudbuild.tex", + u"google-devtools-cloudbuild Documentation", author, "manual", ) @@ -292,8 +310,8 @@ man_pages = [ ( master_doc, - "google-cloud-build", - u"google-cloud-build Documentation", + "google-devtools-cloudbuild", + u"Google Devtools Cloudbuild Documentation", [author], 1, ) @@ -311,11 +329,11 @@ texinfo_documents = [ ( master_doc, - "google-cloud-build", - u"google-cloud-build Documentation", + "google-devtools-cloudbuild", + u"google-devtools-cloudbuild Documentation", author, - "google-cloud-build", - "google-cloud-build Library", + "google-devtools-cloudbuild", + "GAPIC library for Google Devtools Cloudbuild API", "APIs", ) ] @@ -336,9 +354,12 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), } diff --git a/google/cloud/devtools/cloudbuild/__init__.py b/google/cloud/devtools/cloudbuild/__init__.py index a8ad9f88..75bb1b23 100644 --- a/google/cloud/devtools/cloudbuild/__init__.py +++ b/google/cloud/devtools/cloudbuild/__init__.py @@ -15,12 +15,8 @@ # limitations under the License. 
# -from google.cloud.devtools.cloudbuild_v1.services.cloud_build.async_client import ( - CloudBuildAsyncClient, -) -from google.cloud.devtools.cloudbuild_v1.services.cloud_build.client import ( - CloudBuildClient, -) +from google.cloud.devtools.cloudbuild_v1.services.cloud_build.async_client import CloudBuildAsyncClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build.client import CloudBuildClient from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ArtifactResult from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Artifacts from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Build @@ -31,13 +27,9 @@ from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuiltImage from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CancelBuildRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( - CreateBuildTriggerRequest, -) +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildTriggerRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( - DeleteBuildTriggerRequest, -) +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteBuildTriggerRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import FileHashes from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildRequest @@ -45,12 +37,8 @@ from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetWorkerPoolRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitHubEventsConfig from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Hash -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( - ListBuildTriggersRequest, -) -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( - ListBuildTriggersResponse, -) +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersResponse from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsResponse from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsRequest @@ -67,58 +55,56 @@ from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SourceProvenance from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSource from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import TimeSpan -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( - UpdateBuildTriggerRequest, -) +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateBuildTriggerRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Volume from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import WorkerConfig from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import WorkerPool __all__ = ( - "ArtifactResult", - "Artifacts", - "Build", - "BuildOperationMetadata", - "BuildOptions", - "BuildStep", - "BuildTrigger", - "BuiltImage", - "CancelBuildRequest", - "CloudBuildAsyncClient", - "CloudBuildClient", - "CreateBuildRequest", - 
"CreateBuildTriggerRequest", - "CreateWorkerPoolRequest", - "DeleteBuildTriggerRequest", - "DeleteWorkerPoolRequest", - "FileHashes", - "GetBuildRequest", - "GetBuildTriggerRequest", - "GetWorkerPoolRequest", - "GitHubEventsConfig", - "Hash", - "ListBuildTriggersRequest", - "ListBuildTriggersResponse", - "ListBuildsRequest", - "ListBuildsResponse", - "ListWorkerPoolsRequest", - "ListWorkerPoolsResponse", - "Network", - "PullRequestFilter", - "PushFilter", - "RepoSource", - "Results", - "RetryBuildRequest", - "RunBuildTriggerRequest", - "Secret", - "Source", - "SourceProvenance", - "StorageSource", - "TimeSpan", - "UpdateBuildTriggerRequest", - "UpdateWorkerPoolRequest", - "Volume", - "WorkerConfig", - "WorkerPool", + 'ArtifactResult', + 'Artifacts', + 'Build', + 'BuildOperationMetadata', + 'BuildOptions', + 'BuildStep', + 'BuildTrigger', + 'BuiltImage', + 'CancelBuildRequest', + 'CloudBuildAsyncClient', + 'CloudBuildClient', + 'CreateBuildRequest', + 'CreateBuildTriggerRequest', + 'CreateWorkerPoolRequest', + 'DeleteBuildTriggerRequest', + 'DeleteWorkerPoolRequest', + 'FileHashes', + 'GetBuildRequest', + 'GetBuildTriggerRequest', + 'GetWorkerPoolRequest', + 'GitHubEventsConfig', + 'Hash', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'ListBuildsRequest', + 'ListBuildsResponse', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', + 'Network', + 'PullRequestFilter', + 'PushFilter', + 'RepoSource', + 'Results', + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'Secret', + 'Source', + 'SourceProvenance', + 'StorageSource', + 'TimeSpan', + 'UpdateBuildTriggerRequest', + 'UpdateWorkerPoolRequest', + 'Volume', + 'WorkerConfig', + 'WorkerPool', ) diff --git a/google/cloud/devtools/cloudbuild_v1/__init__.py b/google/cloud/devtools/cloudbuild_v1/__init__.py index 59a2ac89..d30f054f 100644 --- a/google/cloud/devtools/cloudbuild_v1/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/__init__.py @@ -62,48 +62,48 @@ __all__ = ( - "ArtifactResult", - "Artifacts", - "Build", - "BuildOperationMetadata", - "BuildOptions", - "BuildStep", - "BuildTrigger", - "BuiltImage", - "CancelBuildRequest", - "CreateBuildRequest", - "CreateBuildTriggerRequest", - "CreateWorkerPoolRequest", - "DeleteBuildTriggerRequest", - "DeleteWorkerPoolRequest", - "FileHashes", - "GetBuildRequest", - "GetBuildTriggerRequest", - "GetWorkerPoolRequest", - "GitHubEventsConfig", - "Hash", - "ListBuildTriggersRequest", - "ListBuildTriggersResponse", - "ListBuildsRequest", - "ListBuildsResponse", - "ListWorkerPoolsRequest", - "ListWorkerPoolsResponse", - "Network", - "PullRequestFilter", - "PushFilter", - "RepoSource", - "Results", - "RetryBuildRequest", - "RunBuildTriggerRequest", - "Secret", - "Source", - "SourceProvenance", - "StorageSource", - "TimeSpan", - "UpdateBuildTriggerRequest", - "UpdateWorkerPoolRequest", - "Volume", - "WorkerConfig", - "WorkerPool", - "CloudBuildClient", + 'ArtifactResult', + 'Artifacts', + 'Build', + 'BuildOperationMetadata', + 'BuildOptions', + 'BuildStep', + 'BuildTrigger', + 'BuiltImage', + 'CancelBuildRequest', + 'CreateBuildRequest', + 'CreateBuildTriggerRequest', + 'CreateWorkerPoolRequest', + 'DeleteBuildTriggerRequest', + 'DeleteWorkerPoolRequest', + 'FileHashes', + 'GetBuildRequest', + 'GetBuildTriggerRequest', + 'GetWorkerPoolRequest', + 'GitHubEventsConfig', + 'Hash', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'ListBuildsRequest', + 'ListBuildsResponse', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', + 'Network', + 'PullRequestFilter', + 
'PushFilter', + 'RepoSource', + 'Results', + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'Secret', + 'Source', + 'SourceProvenance', + 'StorageSource', + 'TimeSpan', + 'UpdateBuildTriggerRequest', + 'UpdateWorkerPoolRequest', + 'Volume', + 'WorkerConfig', + 'WorkerPool', +'CloudBuildClient', ) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py index 51798087..b57f50ba 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py @@ -19,6 +19,6 @@ from .async_client import CloudBuildAsyncClient __all__ = ( - "CloudBuildClient", - "CloudBuildAsyncClient", + 'CloudBuildClient', + 'CloudBuildAsyncClient', ) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index 96421e96..3e7c9d35 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -21,12 +21,12 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore -from google.oauth2 import service_account # type: ignore +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.api_core import operation from google.api_core import operation_async @@ -59,17 +59,13 @@ class CloudBuildAsyncClient: from_service_account_file = CloudBuildClient.from_service_account_file from_service_account_json = from_service_account_file - get_transport_class = functools.partial( - type(CloudBuildClient).get_transport_class, type(CloudBuildClient) - ) + get_transport_class = functools.partial(type(CloudBuildClient).get_transport_class, type(CloudBuildClient)) - def __init__( - self, - *, - credentials: credentials.Credentials = None, - transport: Union[str, CloudBuildTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - ) -> None: + def __init__(self, *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudBuildTransport] = 'grpc_asyncio', + client_options: ClientOptions = None, + ) -> None: """Instantiate the cloud build client. 
Args: @@ -101,19 +97,20 @@ def __init__( """ self._client = CloudBuildClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, ) - async def create_build( - self, - request: cloudbuild.CreateBuildRequest = None, - *, - project_id: str = None, - build: cloudbuild.Build = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: + async def create_build(self, + request: cloudbuild.CreateBuildRequest = None, + *, + project_id: str = None, + build: cloudbuild.Build = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: r"""Starts a build with the specified configuration. This method returns a long-running ``Operation``, which includes @@ -174,10 +171,8 @@ async def create_build( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, build]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.CreateBuildRequest(request) @@ -198,7 +193,12 @@ async def create_build( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -211,16 +211,15 @@ async def create_build( # Done; return the response. return response - async def get_build( - self, - request: cloudbuild.GetBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: + async def get_build(self, + request: cloudbuild.GetBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: r"""Returns information about a previously requested build. The ``Build`` that is returned includes its status (such as @@ -277,10 +276,8 @@ async def get_build( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, id]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.GetBuildRequest(request) @@ -301,7 +298,8 @@ async def get_build( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -309,21 +307,25 @@ async def get_build( ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - async def list_builds( - self, - request: cloudbuild.ListBuildsRequest = None, - *, - project_id: str = None, - filter: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildsAsyncPager: + async def list_builds(self, + request: cloudbuild.ListBuildsRequest = None, + *, + project_id: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildsAsyncPager: r"""Lists previously requested builds. Previously requested builds may still be in-progress, or may have finished successfully or unsuccessfully. @@ -361,10 +363,8 @@ async def list_builds( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, filter]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.ListBuildsRequest(request) @@ -385,7 +385,8 @@ async def list_builds( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -393,27 +394,34 @@ async def list_builds( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListBuildsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. return response - async def cancel_build( - self, - request: cloudbuild.CancelBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: + async def cancel_build(self, + request: cloudbuild.CancelBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: r"""Cancels a build in progress. Args: @@ -466,10 +474,8 @@ async def cancel_build( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, id]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.CancelBuildRequest(request) @@ -490,21 +496,25 @@ async def cancel_build( ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - async def retry_build( - self, - request: cloudbuild.RetryBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: + async def retry_build(self, + request: cloudbuild.RetryBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a new build based on the specified build. This method creates a new build using the original build @@ -591,10 +601,8 @@ async def retry_build( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, id]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.RetryBuildRequest(request) @@ -615,7 +623,12 @@ async def retry_build( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -628,16 +641,15 @@ async def retry_build( # Done; return the response. return response - async def create_build_trigger( - self, - request: cloudbuild.CreateBuildTriggerRequest = None, - *, - project_id: str = None, - trigger: cloudbuild.BuildTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + async def create_build_trigger(self, + request: cloudbuild.CreateBuildTriggerRequest = None, + *, + project_id: str = None, + trigger: cloudbuild.BuildTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Creates a new ``BuildTrigger``. This API is experimental. @@ -675,10 +687,8 @@ async def create_build_trigger( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.CreateBuildTriggerRequest(request) @@ -699,21 +709,25 @@ async def create_build_trigger( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response - async def get_build_trigger( - self, - request: cloudbuild.GetBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + async def get_build_trigger(self, + request: cloudbuild.GetBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Returns information about a ``BuildTrigger``. This API is experimental. @@ -752,10 +766,8 @@ async def get_build_trigger( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger_id]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.GetBuildTriggerRequest(request) @@ -776,7 +788,8 @@ async def get_build_trigger( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -784,20 +797,24 @@ async def get_build_trigger( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - async def list_build_triggers( - self, - request: cloudbuild.ListBuildTriggersRequest = None, - *, - project_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildTriggersAsyncPager: + async def list_build_triggers(self, + request: cloudbuild.ListBuildTriggersRequest = None, + *, + project_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildTriggersAsyncPager: r"""Lists existing ``BuildTrigger``\ s. This API is experimental. @@ -831,10 +848,8 @@ async def list_build_triggers( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.ListBuildTriggersRequest(request) @@ -853,7 +868,8 @@ async def list_build_triggers( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -861,27 +877,34 @@ async def list_build_triggers( ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListBuildTriggersAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. return response - async def delete_build_trigger( - self, - request: cloudbuild.DeleteBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: + async def delete_build_trigger(self, + request: cloudbuild.DeleteBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. This API is experimental. @@ -911,10 +934,8 @@ async def delete_build_trigger( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger_id]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.DeleteBuildTriggerRequest(request) @@ -935,7 +956,8 @@ async def delete_build_trigger( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -944,20 +966,22 @@ async def delete_build_trigger( # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) - async def update_build_trigger( - self, - request: cloudbuild.UpdateBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - trigger: cloudbuild.BuildTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + async def update_build_trigger(self, + request: cloudbuild.UpdateBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + trigger: cloudbuild.BuildTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. This API is experimental. @@ -1000,10 +1024,8 @@ async def update_build_trigger( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger_id, trigger]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.UpdateBuildTriggerRequest(request) @@ -1026,22 +1048,26 @@ async def update_build_trigger( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - async def run_build_trigger( - self, - request: cloudbuild.RunBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - source: cloudbuild.RepoSource = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: + async def run_build_trigger(self, + request: cloudbuild.RunBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + source: cloudbuild.RepoSource = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: r"""Runs a ``BuildTrigger`` at a particular source revision. Args: @@ -1105,10 +1131,8 @@ async def run_build_trigger( # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger_id, source]): - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') request = cloudbuild.RunBuildTriggerRequest(request) @@ -1131,7 +1155,12 @@ async def run_build_trigger( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1144,14 +1173,13 @@ async def run_build_trigger( # Done; return the response. return response - async def create_worker_pool( - self, - request: cloudbuild.CreateWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + async def create_worker_pool(self, + request: cloudbuild.CreateWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Creates a ``WorkerPool`` to run the builds, and returns the new worker pool. @@ -1197,19 +1225,23 @@ async def create_worker_pool( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response - async def get_worker_pool( - self, - request: cloudbuild.GetWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + async def get_worker_pool(self, + request: cloudbuild.GetWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Returns information about a ``WorkerPool``. This API is experimental. @@ -1254,7 +1286,8 @@ async def get_worker_pool( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -1262,19 +1295,23 @@ async def get_worker_pool( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - async def delete_worker_pool( - self, - request: cloudbuild.DeleteWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: + async def delete_worker_pool(self, + request: cloudbuild.DeleteWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Deletes a ``WorkerPool`` by its project ID and WorkerPool name. This API is experimental. @@ -1303,17 +1340,19 @@ async def delete_worker_pool( # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) - async def update_worker_pool( - self, - request: cloudbuild.UpdateWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + async def update_worker_pool(self, + request: cloudbuild.UpdateWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Update a ``WorkerPool``. This API is experimental. @@ -1357,19 +1396,23 @@ async def update_worker_pool( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - async def list_worker_pools( - self, - request: cloudbuild.ListWorkerPoolsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.ListWorkerPoolsResponse: + async def list_worker_pools(self, + request: cloudbuild.ListWorkerPoolsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ListWorkerPoolsResponse: r"""List project's ``WorkerPools``. This API is experimental. 
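The wrapped methods above share one default policy: exponential backoff starting at 0.1s, capped at 60s, with a 1.3 multiplier, retrying only `ServiceUnavailable` and `DeadlineExceeded`, under a 600s default timeout. Per-call `retry` and `timeout` arguments take precedence over those defaults. A sketch of a caller-supplied override, assuming application default credentials are configured; the project and build IDs are placeholders:

    from google.api_core import exceptions
    from google.api_core import retry as retries
    from google.cloud.devtools import cloudbuild_v1

    # Mirror the generated defaults shown above, as a caller-owned policy.
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            exceptions.ServiceUnavailable,
            exceptions.DeadlineExceeded,
        ),
    )

    client = cloudbuild_v1.CloudBuildClient()

    # Per-call retry/timeout override the defaults baked into the
    # wrapped method.
    build = client.get_build(
        project_id="my-project",  # placeholder
        id="some-build-id",       # placeholder
        retry=custom_retry,
        timeout=30.0,
    )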
@@ -1401,7 +1444,8 @@ async def list_worker_pools( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -1409,20 +1453,32 @@ async def list_worker_pools( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response + + + + + try: _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( - "google-devtools-cloudbuild", + 'google-devtools-cloudbuild', ).version, ) except pkg_resources.DistributionNotFound: _client_info = gapic_v1.client_info.ClientInfo() -__all__ = ("CloudBuildAsyncClient",) +__all__ = ( + 'CloudBuildAsyncClient', +) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index e11623af..dc819666 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -21,14 +21,14 @@ from typing import Callable, Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore -from google.auth.transport import mtls # type: ignore +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore from google.api_core import operation from google.api_core import operation_async @@ -49,12 +49,13 @@ class CloudBuildClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] - _transport_registry["grpc"] = CloudBuildGrpcTransport - _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport + _transport_registry['grpc'] = CloudBuildGrpcTransport + _transport_registry['grpc_asyncio'] = CloudBuildGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[CloudBuildTransport]: + def get_transport_class(cls, + label: str = None, + ) -> Type[CloudBuildTransport]: """Return an appropriate transport class. Args: @@ -113,7 +114,7 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" + DEFAULT_ENDPOINT = 'cloudbuild.googleapis.com' DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) @@ -132,19 +133,18 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: {@api.name}: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file - def __init__( - self, - *, - credentials: credentials.Credentials = None, - transport: Union[str, CloudBuildTransport] = None, - client_options: ClientOptions = None, - ) -> None: + def __init__(self, *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudBuildTransport] = None, + client_options: ClientOptions = None, + ) -> None: """Instantiate the cloud build client. Args: @@ -191,9 +191,7 @@ def __init__( or mtls.has_default_client_cert_source() ) client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + self.DEFAULT_MTLS_ENDPOINT if has_client_cert_source else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( @@ -206,10 +204,8 @@ def __init__( if isinstance(transport, CloudBuildTransport): # transport is a CloudBuildTransport instance. if credentials or client_options.credentials_file: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) + raise ValueError('When providing a transport instance, ' + 'provide its credentials directly.') if client_options.scopes: raise ValueError( "When providing a transport instance, " @@ -228,16 +224,15 @@ def __init__( quota_project_id=client_options.quota_project_id, ) - def create_build( - self, - request: cloudbuild.CreateBuildRequest = None, - *, - project_id: str = None, - build: cloudbuild.Build = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_build(self, + request: cloudbuild.CreateBuildRequest = None, + *, + project_id: str = None, + build: cloudbuild.Build = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Starts a build with the specified configuration. This method returns a long-running ``Operation``, which includes @@ -299,10 +294,8 @@ def create_build( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, build]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.CreateBuildRequest. @@ -324,7 +317,12 @@ def create_build( rpc = self._transport._wrapped_methods[self._transport.create_build] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -337,16 +335,15 @@ def create_build( # Done; return the response. 
return response - def get_build( - self, - request: cloudbuild.GetBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: + def get_build(self, + request: cloudbuild.GetBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: r"""Returns information about a previously requested build. The ``Build`` that is returned includes its status (such as @@ -404,10 +401,8 @@ def get_build( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, id]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.GetBuildRequest. @@ -429,21 +424,25 @@ def get_build( rpc = self._transport._wrapped_methods[self._transport.get_build] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - def list_builds( - self, - request: cloudbuild.ListBuildsRequest = None, - *, - project_id: str = None, - filter: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildsPager: + def list_builds(self, + request: cloudbuild.ListBuildsRequest = None, + *, + project_id: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildsPager: r"""Lists previously requested builds. Previously requested builds may still be in-progress, or may have finished successfully or unsuccessfully. @@ -482,10 +481,8 @@ def list_builds( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, filter]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.ListBuildsRequest. @@ -507,27 +504,34 @@ def list_builds( rpc = self._transport._wrapped_methods[self._transport.list_builds] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListBuildsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. 
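        # Illustrative sketch: the pager is iterable, so callers can walk
        # every build without touching page tokens (hypothetical project ID):
        #
        #     for build in client.list_builds(project_id="my-project"):
        #         print(build.id, build.status)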
return response - def cancel_build( - self, - request: cloudbuild.CancelBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: + def cancel_build(self, + request: cloudbuild.CancelBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: r"""Cancels a build in progress. Args: @@ -581,10 +585,8 @@ def cancel_build( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, id]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.CancelBuildRequest. @@ -606,21 +608,25 @@ def cancel_build( rpc = self._transport._wrapped_methods[self._transport.cancel_build] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - def retry_build( - self, - request: cloudbuild.RetryBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def retry_build(self, + request: cloudbuild.RetryBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new build based on the specified build. This method creates a new build using the original build @@ -708,10 +714,8 @@ def retry_build( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, id]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.RetryBuildRequest. @@ -733,7 +737,12 @@ def retry_build( rpc = self._transport._wrapped_methods[self._transport.retry_build] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -746,16 +755,15 @@ def retry_build( # Done; return the response. 
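        # Illustrative sketch (hypothetical IDs): retry_build returns a
        # long-running operation whose result is the newly created Build.
        #
        #     op = client.retry_build(project_id="my-project", id="original-build-id")
        #     new_build = op.result()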
return response - def create_build_trigger( - self, - request: cloudbuild.CreateBuildTriggerRequest = None, - *, - project_id: str = None, - trigger: cloudbuild.BuildTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + def create_build_trigger(self, + request: cloudbuild.CreateBuildTriggerRequest = None, + *, + project_id: str = None, + trigger: cloudbuild.BuildTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Creates a new ``BuildTrigger``. This API is experimental. @@ -794,10 +802,8 @@ def create_build_trigger( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.CreateBuildTriggerRequest. @@ -819,21 +825,25 @@ def create_build_trigger( rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - def get_build_trigger( - self, - request: cloudbuild.GetBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + def get_build_trigger(self, + request: cloudbuild.GetBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Returns information about a ``BuildTrigger``. This API is experimental. @@ -873,10 +883,8 @@ def get_build_trigger( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger_id]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.GetBuildTriggerRequest. @@ -898,20 +906,24 @@ def get_build_trigger( rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
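        # Illustrative sketch (hypothetical values; only a few BuildTrigger
        # fields shown):
        #
        #     trigger = cloudbuild.BuildTrigger(
        #         description="nightly build",
        #         filename="cloudbuild.yaml",
        #     )
        #     created = client.create_build_trigger(
        #         project_id="my-project", trigger=trigger,
        #     )
        #     fetched = client.get_build_trigger(
        #         project_id="my-project", trigger_id=created.id,
        #     )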
return response - def list_build_triggers( - self, - request: cloudbuild.ListBuildTriggersRequest = None, - *, - project_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildTriggersPager: + def list_build_triggers(self, + request: cloudbuild.ListBuildTriggersRequest = None, + *, + project_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildTriggersPager: r"""Lists existing ``BuildTrigger``\ s. This API is experimental. @@ -946,10 +958,8 @@ def list_build_triggers( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.ListBuildTriggersRequest. @@ -969,27 +979,34 @@ def list_build_triggers( rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListBuildTriggersPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. return response - def delete_build_trigger( - self, - request: cloudbuild.DeleteBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: + def delete_build_trigger(self, + request: cloudbuild.DeleteBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. This API is experimental. @@ -1020,10 +1037,8 @@ def delete_build_trigger( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger_id]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.DeleteBuildTriggerRequest. @@ -1046,20 +1061,22 @@ def delete_build_trigger( # Send the request. 
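        # delete_build_trigger has no meaningful payload: the RPC resolves
        # to google.protobuf.Empty, so the client discards the result and
        # the method returns None.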
rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) - def update_build_trigger( - self, - request: cloudbuild.UpdateBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - trigger: cloudbuild.BuildTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + def update_build_trigger(self, + request: cloudbuild.UpdateBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + trigger: cloudbuild.BuildTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. This API is experimental. @@ -1103,10 +1120,8 @@ def update_build_trigger( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger_id, trigger]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.UpdateBuildTriggerRequest. @@ -1130,22 +1145,26 @@ def update_build_trigger( rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - def run_build_trigger( - self, - request: cloudbuild.RunBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - source: cloudbuild.RepoSource = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def run_build_trigger(self, + request: cloudbuild.RunBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + source: cloudbuild.RepoSource = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Runs a ``BuildTrigger`` at a particular source revision. Args: @@ -1210,10 +1229,8 @@ def run_build_trigger( # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger_id, source]) if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.RunBuildTriggerRequest. @@ -1237,7 +1254,12 @@ def run_build_trigger( rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
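        # operation.from_gapic pairs the raw Operation proto with the
        # transport's operations client, so callers can poll with
        # `.result()` and get back the deserialized result message.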
response = operation.from_gapic( @@ -1250,14 +1272,13 @@ def run_build_trigger( # Done; return the response. return response - def create_worker_pool( - self, - request: cloudbuild.CreateWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + def create_worker_pool(self, + request: cloudbuild.CreateWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Creates a ``WorkerPool`` to run the builds, and returns the new worker pool. @@ -1304,19 +1325,23 @@ def create_worker_pool( rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - def get_worker_pool( - self, - request: cloudbuild.GetWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + def get_worker_pool(self, + request: cloudbuild.GetWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Returns information about a ``WorkerPool``. This API is experimental. @@ -1362,19 +1387,23 @@ def get_worker_pool( rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response - def delete_worker_pool( - self, - request: cloudbuild.DeleteWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: + def delete_worker_pool(self, + request: cloudbuild.DeleteWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Deletes a ``WorkerPool`` by its project ID and WorkerPool name. This API is experimental. @@ -1404,17 +1433,19 @@ def delete_worker_pool( # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) - def update_worker_pool( - self, - request: cloudbuild.UpdateWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + def update_worker_pool(self, + request: cloudbuild.UpdateWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Update a ``WorkerPool``. This API is experimental. @@ -1459,19 +1490,23 @@ def update_worker_pool( rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
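        # Illustrative sketch: the WorkerPool methods accept only a request
        # object, with no flattened fields. The `name` field here is an
        # assumed example value:
        #
        #     pool = client.get_worker_pool(
        #         request=cloudbuild.GetWorkerPoolRequest(name="my-pool"),
        #     )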
return response - def list_worker_pools( - self, - request: cloudbuild.ListWorkerPoolsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.ListWorkerPoolsResponse: + def list_worker_pools(self, + request: cloudbuild.ListWorkerPoolsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ListWorkerPoolsResponse: r"""List project's ``WorkerPools``. This API is experimental. @@ -1504,20 +1539,32 @@ def list_worker_pools( rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response + + + + + try: _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( - "google-devtools-cloudbuild", + 'google-devtools-cloudbuild', ).version, ) except pkg_resources.DistributionNotFound: _client_info = gapic_v1.client_info.ClientInfo() -__all__ = ("CloudBuildClient",) +__all__ = ( + 'CloudBuildClient', +) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py index 52132bd1..53593f53 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py @@ -37,15 +37,12 @@ class ListBuildsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - - def __init__( - self, - method: Callable[..., cloudbuild.ListBuildsResponse], - request: cloudbuild.ListBuildsRequest, - response: cloudbuild.ListBuildsResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): + def __init__(self, + method: Callable[..., cloudbuild.ListBuildsResponse], + request: cloudbuild.ListBuildsRequest, + response: cloudbuild.ListBuildsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. Args: @@ -79,7 +76,7 @@ def __iter__(self) -> Iterable[cloudbuild.Build]: yield from page.builds def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) class ListBuildsAsyncPager: @@ -99,15 +96,12 @@ class ListBuildsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - - def __init__( - self, - method: Callable[..., Awaitable[cloudbuild.ListBuildsResponse]], - request: cloudbuild.ListBuildsRequest, - response: cloudbuild.ListBuildsResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): + def __init__(self, + method: Callable[..., Awaitable[cloudbuild.ListBuildsResponse]], + request: cloudbuild.ListBuildsRequest, + response: cloudbuild.ListBuildsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. 
Args: @@ -145,7 +139,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) class ListBuildTriggersPager: @@ -165,15 +159,12 @@ class ListBuildTriggersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - - def __init__( - self, - method: Callable[..., cloudbuild.ListBuildTriggersResponse], - request: cloudbuild.ListBuildTriggersRequest, - response: cloudbuild.ListBuildTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): + def __init__(self, + method: Callable[..., cloudbuild.ListBuildTriggersResponse], + request: cloudbuild.ListBuildTriggersRequest, + response: cloudbuild.ListBuildTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. Args: @@ -207,7 +198,7 @@ def __iter__(self) -> Iterable[cloudbuild.BuildTrigger]: yield from page.triggers def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) class ListBuildTriggersAsyncPager: @@ -227,15 +218,12 @@ class ListBuildTriggersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - - def __init__( - self, - method: Callable[..., Awaitable[cloudbuild.ListBuildTriggersResponse]], - request: cloudbuild.ListBuildTriggersRequest, - response: cloudbuild.ListBuildTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): + def __init__(self, + method: Callable[..., Awaitable[cloudbuild.ListBuildTriggersResponse]], + request: cloudbuild.ListBuildTriggersRequest, + response: cloudbuild.ListBuildTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. Args: @@ -273,4 +261,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py index cf9e2143..7239ab73 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py @@ -25,12 +25,12 @@ # Compile a registry of transports. 
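# Illustrative sketch: client classes resolve a transport class from this
# registry by label, e.g.
#
#     transport_cls = CloudBuildClient.get_transport_class("grpc")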
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] -_transport_registry["grpc"] = CloudBuildGrpcTransport -_transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport +_transport_registry['grpc'] = CloudBuildGrpcTransport +_transport_registry['grpc_asyncio'] = CloudBuildGrpcAsyncIOTransport __all__ = ( - "CloudBuildTransport", - "CloudBuildGrpcTransport", - "CloudBuildGrpcAsyncIOTransport", + 'CloudBuildTransport', + 'CloudBuildGrpcTransport', + 'CloudBuildGrpcAsyncIOTransport', ) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py index 32d9f153..3b1b21ee 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py @@ -21,7 +21,7 @@ from google import auth from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials # type: ignore @@ -34,28 +34,28 @@ try: _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( - "google-devtools-cloudbuild", + 'google-devtools-cloudbuild', ).version, ) except pkg_resources.DistributionNotFound: _client_info = gapic_v1.client_info.ClientInfo() - class CloudBuildTransport(abc.ABC): """Abstract transport class for CloudBuild.""" - AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) def __init__( - self, - *, - host: str = "cloudbuild.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, - **kwargs, - ) -> None: + self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -73,26 +73,24 @@ def __init__( and quota. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" + if ':' not in host: + host += ':443' self._host = host # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) + raise exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id + ) elif credentials is None: - credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id - ) + credentials, _ = auth.default(scopes=scopes, quota_project_id=quota_project_id) # Save the credentials. self._credentials = credentials @@ -104,7 +102,9 @@ def _prep_wrapped_messages(self): # Precompute the wrapped methods. 
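        # With initial=0.1, multiplier=1.3 and maximum=60.0, the nominal
        # retry delays grow geometrically (0.1s, 0.13s, 0.169s, ...) with
        # jitter applied by the retry helper, and are capped at 60s between
        # attempts; the method-level default timeout stays at 600s.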
self._wrapped_methods = { self.create_build: gapic_v1.method.wrap_method( - self.create_build, default_timeout=600.0, client_info=_client_info, + self.create_build, + default_timeout=600.0, + client_info=_client_info, ), self.get_build: gapic_v1.method.wrap_method( self.get_build, @@ -113,7 +113,8 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -126,17 +127,22 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, client_info=_client_info, ), self.cancel_build: gapic_v1.method.wrap_method( - self.cancel_build, default_timeout=600.0, client_info=_client_info, + self.cancel_build, + default_timeout=600.0, + client_info=_client_info, ), self.retry_build: gapic_v1.method.wrap_method( - self.retry_build, default_timeout=600.0, client_info=_client_info, + self.retry_build, + default_timeout=600.0, + client_info=_client_info, ), self.create_build_trigger: gapic_v1.method.wrap_method( self.create_build_trigger, @@ -150,7 +156,8 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -163,7 +170,8 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -176,7 +184,8 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -188,7 +197,9 @@ def _prep_wrapped_messages(self): client_info=_client_info, ), self.run_build_trigger: gapic_v1.method.wrap_method( - self.run_build_trigger, default_timeout=600.0, client_info=_client_info, + self.run_build_trigger, + default_timeout=600.0, + client_info=_client_info, ), self.create_worker_pool: gapic_v1.method.wrap_method( self.create_worker_pool, @@ -202,7 +213,8 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -225,12 +237,14 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, +exceptions.DeadlineExceeded, ), ), default_timeout=600.0, client_info=_client_info, ), + } @property @@ -239,163 +253,150 @@ def operations_client(self) -> operations_v1.OperationsClient: raise NotImplementedError() @property - def create_build( - self, - ) -> typing.Callable[ - [cloudbuild.CreateBuildRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], - ]: + def create_build(self) -> typing.Callable[ + [cloudbuild.CreateBuildRequest], + typing.Union[ + operations.Operation, + 
typing.Awaitable[operations.Operation] + ]]: raise NotImplementedError() @property - def get_build( - self, - ) -> typing.Callable[ - [cloudbuild.GetBuildRequest], - typing.Union[cloudbuild.Build, typing.Awaitable[cloudbuild.Build]], - ]: + def get_build(self) -> typing.Callable[ + [cloudbuild.GetBuildRequest], + typing.Union[ + cloudbuild.Build, + typing.Awaitable[cloudbuild.Build] + ]]: raise NotImplementedError() @property - def list_builds( - self, - ) -> typing.Callable[ - [cloudbuild.ListBuildsRequest], - typing.Union[ - cloudbuild.ListBuildsResponse, - typing.Awaitable[cloudbuild.ListBuildsResponse], - ], - ]: + def list_builds(self) -> typing.Callable[ + [cloudbuild.ListBuildsRequest], + typing.Union[ + cloudbuild.ListBuildsResponse, + typing.Awaitable[cloudbuild.ListBuildsResponse] + ]]: raise NotImplementedError() @property - def cancel_build( - self, - ) -> typing.Callable[ - [cloudbuild.CancelBuildRequest], - typing.Union[cloudbuild.Build, typing.Awaitable[cloudbuild.Build]], - ]: + def cancel_build(self) -> typing.Callable[ + [cloudbuild.CancelBuildRequest], + typing.Union[ + cloudbuild.Build, + typing.Awaitable[cloudbuild.Build] + ]]: raise NotImplementedError() @property - def retry_build( - self, - ) -> typing.Callable[ - [cloudbuild.RetryBuildRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], - ]: + def retry_build(self) -> typing.Callable[ + [cloudbuild.RetryBuildRequest], + typing.Union[ + operations.Operation, + typing.Awaitable[operations.Operation] + ]]: raise NotImplementedError() @property - def create_build_trigger( - self, - ) -> typing.Callable[ - [cloudbuild.CreateBuildTriggerRequest], - typing.Union[ - cloudbuild.BuildTrigger, typing.Awaitable[cloudbuild.BuildTrigger] - ], - ]: + def create_build_trigger(self) -> typing.Callable[ + [cloudbuild.CreateBuildTriggerRequest], + typing.Union[ + cloudbuild.BuildTrigger, + typing.Awaitable[cloudbuild.BuildTrigger] + ]]: raise NotImplementedError() @property - def get_build_trigger( - self, - ) -> typing.Callable[ - [cloudbuild.GetBuildTriggerRequest], - typing.Union[ - cloudbuild.BuildTrigger, typing.Awaitable[cloudbuild.BuildTrigger] - ], - ]: + def get_build_trigger(self) -> typing.Callable[ + [cloudbuild.GetBuildTriggerRequest], + typing.Union[ + cloudbuild.BuildTrigger, + typing.Awaitable[cloudbuild.BuildTrigger] + ]]: raise NotImplementedError() @property - def list_build_triggers( - self, - ) -> typing.Callable[ - [cloudbuild.ListBuildTriggersRequest], - typing.Union[ - cloudbuild.ListBuildTriggersResponse, - typing.Awaitable[cloudbuild.ListBuildTriggersResponse], - ], - ]: + def list_build_triggers(self) -> typing.Callable[ + [cloudbuild.ListBuildTriggersRequest], + typing.Union[ + cloudbuild.ListBuildTriggersResponse, + typing.Awaitable[cloudbuild.ListBuildTriggersResponse] + ]]: raise NotImplementedError() @property - def delete_build_trigger( - self, - ) -> typing.Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], - ]: + def delete_build_trigger(self) -> typing.Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + typing.Union[ + empty.Empty, + typing.Awaitable[empty.Empty] + ]]: raise NotImplementedError() @property - def update_build_trigger( - self, - ) -> typing.Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - typing.Union[ - cloudbuild.BuildTrigger, typing.Awaitable[cloudbuild.BuildTrigger] - ], - ]: + def update_build_trigger(self) -> typing.Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + 
typing.Union[ + cloudbuild.BuildTrigger, + typing.Awaitable[cloudbuild.BuildTrigger] + ]]: raise NotImplementedError() @property - def run_build_trigger( - self, - ) -> typing.Callable[ - [cloudbuild.RunBuildTriggerRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], - ]: + def run_build_trigger(self) -> typing.Callable[ + [cloudbuild.RunBuildTriggerRequest], + typing.Union[ + operations.Operation, + typing.Awaitable[operations.Operation] + ]]: raise NotImplementedError() @property - def create_worker_pool( - self, - ) -> typing.Callable[ - [cloudbuild.CreateWorkerPoolRequest], - typing.Union[cloudbuild.WorkerPool, typing.Awaitable[cloudbuild.WorkerPool]], - ]: + def create_worker_pool(self) -> typing.Callable[ + [cloudbuild.CreateWorkerPoolRequest], + typing.Union[ + cloudbuild.WorkerPool, + typing.Awaitable[cloudbuild.WorkerPool] + ]]: raise NotImplementedError() @property - def get_worker_pool( - self, - ) -> typing.Callable[ - [cloudbuild.GetWorkerPoolRequest], - typing.Union[cloudbuild.WorkerPool, typing.Awaitable[cloudbuild.WorkerPool]], - ]: + def get_worker_pool(self) -> typing.Callable[ + [cloudbuild.GetWorkerPoolRequest], + typing.Union[ + cloudbuild.WorkerPool, + typing.Awaitable[cloudbuild.WorkerPool] + ]]: raise NotImplementedError() @property - def delete_worker_pool( - self, - ) -> typing.Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], - ]: + def delete_worker_pool(self) -> typing.Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + typing.Union[ + empty.Empty, + typing.Awaitable[empty.Empty] + ]]: raise NotImplementedError() @property - def update_worker_pool( - self, - ) -> typing.Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - typing.Union[cloudbuild.WorkerPool, typing.Awaitable[cloudbuild.WorkerPool]], - ]: + def update_worker_pool(self) -> typing.Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + typing.Union[ + cloudbuild.WorkerPool, + typing.Awaitable[cloudbuild.WorkerPool] + ]]: raise NotImplementedError() @property - def list_worker_pools( - self, - ) -> typing.Callable[ - [cloudbuild.ListWorkerPoolsRequest], - typing.Union[ - cloudbuild.ListWorkerPoolsResponse, - typing.Awaitable[cloudbuild.ListWorkerPoolsResponse], - ], - ]: + def list_worker_pools(self) -> typing.Callable[ + [cloudbuild.ListWorkerPoolsRequest], + typing.Union[ + cloudbuild.ListWorkerPoolsResponse, + typing.Awaitable[cloudbuild.ListWorkerPoolsResponse] + ]]: raise NotImplementedError() -__all__ = ("CloudBuildTransport",) +__all__ = ( + 'CloudBuildTransport', +) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py index c8affe84..adb9944b 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py @@ -17,10 +17,10 @@ from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import grpc_helpers # type: ignore +from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -52,21 +52,17 @@ class CloudBuildGrpcTransport(CloudBuildTransport): It sends 
protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ - _stubs: Dict[str, Callable] - def __init__( - self, - *, - host: str = "cloudbuild.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None - ) -> None: + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None) -> None: """Instantiate the transport. Args: @@ -109,16 +105,10 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) # Create SSL credentials with client_cert_source or application # default SSL credentials. @@ -152,15 +142,13 @@ def __init__( ) @classmethod - def create_channel( - cls, - host: str = "cloudbuild.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs - ) -> grpc.Channel: + def create_channel(cls, + host: str = 'cloudbuild.googleapis.com', + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. Args: address (Optionsl[str]): The host for the channel to use. @@ -205,9 +193,10 @@ def grpc_channel(self) -> grpc.Channel: """ # Sanity check: Only create a new channel if we do not already # have one. - if not hasattr(self, "_grpc_channel"): + if not hasattr(self, '_grpc_channel'): self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, + self._host, + credentials=self._credentials, ) # Return the channel from cache. @@ -221,18 +210,18 @@ def operations_client(self) -> operations_v1.OperationsClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsClient( + if 'operations_client' not in self.__dict__: + self.__dict__['operations_client'] = operations_v1.OperationsClient( self.grpc_channel ) # Return the client from cache. - return self.__dict__["operations_client"] + return self.__dict__['operations_client'] @property - def create_build( - self, - ) -> Callable[[cloudbuild.CreateBuildRequest], operations.Operation]: + def create_build(self) -> Callable[ + [cloudbuild.CreateBuildRequest], + operations.Operation]: r"""Return a callable for the create build method over gRPC. Starts a build with the specified configuration. @@ -251,16 +240,18 @@ def create_build( # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_build" not in self._stubs: - self._stubs["create_build"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild", + if 'create_build' not in self._stubs: + self._stubs['create_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', request_serializer=cloudbuild.CreateBuildRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs["create_build"] + return self._stubs['create_build'] @property - def get_build(self) -> Callable[[cloudbuild.GetBuildRequest], cloudbuild.Build]: + def get_build(self) -> Callable[ + [cloudbuild.GetBuildRequest], + cloudbuild.Build]: r"""Return a callable for the get build method over gRPC. Returns information about a previously requested build. @@ -279,18 +270,18 @@ def get_build(self) -> Callable[[cloudbuild.GetBuildRequest], cloudbuild.Build]: # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_build" not in self._stubs: - self._stubs["get_build"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/GetBuild", + if 'get_build' not in self._stubs: + self._stubs['get_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', request_serializer=cloudbuild.GetBuildRequest.serialize, response_deserializer=cloudbuild.Build.deserialize, ) - return self._stubs["get_build"] + return self._stubs['get_build'] @property - def list_builds( - self, - ) -> Callable[[cloudbuild.ListBuildsRequest], cloudbuild.ListBuildsResponse]: + def list_builds(self) -> Callable[ + [cloudbuild.ListBuildsRequest], + cloudbuild.ListBuildsResponse]: r"""Return a callable for the list builds method over gRPC. Lists previously requested builds. @@ -307,18 +298,18 @@ def list_builds( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_builds" not in self._stubs: - self._stubs["list_builds"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds", + if 'list_builds' not in self._stubs: + self._stubs['list_builds'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', request_serializer=cloudbuild.ListBuildsRequest.serialize, response_deserializer=cloudbuild.ListBuildsResponse.deserialize, ) - return self._stubs["list_builds"] + return self._stubs['list_builds'] @property - def cancel_build( - self, - ) -> Callable[[cloudbuild.CancelBuildRequest], cloudbuild.Build]: + def cancel_build(self) -> Callable[ + [cloudbuild.CancelBuildRequest], + cloudbuild.Build]: r"""Return a callable for the cancel build method over gRPC. Cancels a build in progress. @@ -333,18 +324,18 @@ def cancel_build( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
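        # Each stub property memoizes its handle in self._stubs, so the
        # channel's unary_unary callable is built once per method name and
        # reused on every subsequent invocation.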
- if "cancel_build" not in self._stubs: - self._stubs["cancel_build"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild", + if 'cancel_build' not in self._stubs: + self._stubs['cancel_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', request_serializer=cloudbuild.CancelBuildRequest.serialize, response_deserializer=cloudbuild.Build.deserialize, ) - return self._stubs["cancel_build"] + return self._stubs['cancel_build'] @property - def retry_build( - self, - ) -> Callable[[cloudbuild.RetryBuildRequest], operations.Operation]: + def retry_build(self) -> Callable[ + [cloudbuild.RetryBuildRequest], + operations.Operation]: r"""Return a callable for the retry build method over gRPC. Creates a new build based on the specified build. @@ -388,18 +379,18 @@ def retry_build( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "retry_build" not in self._stubs: - self._stubs["retry_build"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild", + if 'retry_build' not in self._stubs: + self._stubs['retry_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', request_serializer=cloudbuild.RetryBuildRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs["retry_build"] + return self._stubs['retry_build'] @property - def create_build_trigger( - self, - ) -> Callable[[cloudbuild.CreateBuildTriggerRequest], cloudbuild.BuildTrigger]: + def create_build_trigger(self) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], + cloudbuild.BuildTrigger]: r"""Return a callable for the create build trigger method over gRPC. Creates a new ``BuildTrigger``. @@ -416,18 +407,18 @@ def create_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_build_trigger" not in self._stubs: - self._stubs["create_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger", + if 'create_build_trigger' not in self._stubs: + self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs["create_build_trigger"] + return self._stubs['create_build_trigger'] @property - def get_build_trigger( - self, - ) -> Callable[[cloudbuild.GetBuildTriggerRequest], cloudbuild.BuildTrigger]: + def get_build_trigger(self) -> Callable[ + [cloudbuild.GetBuildTriggerRequest], + cloudbuild.BuildTrigger]: r"""Return a callable for the get build trigger method over gRPC. Returns information about a ``BuildTrigger``. @@ -444,20 +435,18 @@ def get_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "get_build_trigger" not in self._stubs: - self._stubs["get_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger", + if 'get_build_trigger' not in self._stubs: + self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs["get_build_trigger"] + return self._stubs['get_build_trigger'] @property - def list_build_triggers( - self, - ) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], cloudbuild.ListBuildTriggersResponse - ]: + def list_build_triggers(self) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + cloudbuild.ListBuildTriggersResponse]: r"""Return a callable for the list build triggers method over gRPC. Lists existing ``BuildTrigger``\ s. @@ -474,18 +463,18 @@ def list_build_triggers( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_build_triggers" not in self._stubs: - self._stubs["list_build_triggers"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers", + if 'list_build_triggers' not in self._stubs: + self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, ) - return self._stubs["list_build_triggers"] + return self._stubs['list_build_triggers'] @property - def delete_build_trigger( - self, - ) -> Callable[[cloudbuild.DeleteBuildTriggerRequest], empty.Empty]: + def delete_build_trigger(self) -> Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + empty.Empty]: r"""Return a callable for the delete build trigger method over gRPC. Deletes a ``BuildTrigger`` by its project ID and trigger ID. @@ -502,18 +491,18 @@ def delete_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_build_trigger" not in self._stubs: - self._stubs["delete_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger", + if 'delete_build_trigger' not in self._stubs: + self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, response_deserializer=empty.Empty.FromString, ) - return self._stubs["delete_build_trigger"] + return self._stubs['delete_build_trigger'] @property - def update_build_trigger( - self, - ) -> Callable[[cloudbuild.UpdateBuildTriggerRequest], cloudbuild.BuildTrigger]: + def update_build_trigger(self) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + cloudbuild.BuildTrigger]: r"""Return a callable for the update build trigger method over gRPC. Updates a ``BuildTrigger`` by its project ID and trigger ID. @@ -530,18 +519,18 @@ def update_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "update_build_trigger" not in self._stubs: - self._stubs["update_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger", + if 'update_build_trigger' not in self._stubs: + self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs["update_build_trigger"] + return self._stubs['update_build_trigger'] @property - def run_build_trigger( - self, - ) -> Callable[[cloudbuild.RunBuildTriggerRequest], operations.Operation]: + def run_build_trigger(self) -> Callable[ + [cloudbuild.RunBuildTriggerRequest], + operations.Operation]: r"""Return a callable for the run build trigger method over gRPC. Runs a ``BuildTrigger`` at a particular source revision. @@ -556,18 +545,18 @@ def run_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "run_build_trigger" not in self._stubs: - self._stubs["run_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger", + if 'run_build_trigger' not in self._stubs: + self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs["run_build_trigger"] + return self._stubs['run_build_trigger'] @property - def create_worker_pool( - self, - ) -> Callable[[cloudbuild.CreateWorkerPoolRequest], cloudbuild.WorkerPool]: + def create_worker_pool(self) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], + cloudbuild.WorkerPool]: r"""Return a callable for the create worker pool method over gRPC. Creates a ``WorkerPool`` to run the builds, and returns the new @@ -585,18 +574,18 @@ def create_worker_pool( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_worker_pool" not in self._stubs: - self._stubs["create_worker_pool"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool", + if 'create_worker_pool' not in self._stubs: + self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs["create_worker_pool"] + return self._stubs['create_worker_pool'] @property - def get_worker_pool( - self, - ) -> Callable[[cloudbuild.GetWorkerPoolRequest], cloudbuild.WorkerPool]: + def get_worker_pool(self) -> Callable[ + [cloudbuild.GetWorkerPoolRequest], + cloudbuild.WorkerPool]: r"""Return a callable for the get worker pool method over gRPC. Returns information about a ``WorkerPool``. @@ -613,18 +602,18 @@ def get_worker_pool( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "get_worker_pool" not in self._stubs: - self._stubs["get_worker_pool"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool", + if 'get_worker_pool' not in self._stubs: + self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs["get_worker_pool"] + return self._stubs['get_worker_pool'] @property - def delete_worker_pool( - self, - ) -> Callable[[cloudbuild.DeleteWorkerPoolRequest], empty.Empty]: + def delete_worker_pool(self) -> Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + empty.Empty]: r"""Return a callable for the delete worker pool method over gRPC. Deletes a ``WorkerPool`` by its project ID and WorkerPool name. @@ -641,18 +630,18 @@ def delete_worker_pool( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_worker_pool" not in self._stubs: - self._stubs["delete_worker_pool"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool", + if 'delete_worker_pool' not in self._stubs: + self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, response_deserializer=empty.Empty.FromString, ) - return self._stubs["delete_worker_pool"] + return self._stubs['delete_worker_pool'] @property - def update_worker_pool( - self, - ) -> Callable[[cloudbuild.UpdateWorkerPoolRequest], cloudbuild.WorkerPool]: + def update_worker_pool(self) -> Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + cloudbuild.WorkerPool]: r"""Return a callable for the update worker pool method over gRPC. Update a ``WorkerPool``. @@ -669,20 +658,18 @@ def update_worker_pool( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "update_worker_pool" not in self._stubs: - self._stubs["update_worker_pool"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool", + if 'update_worker_pool' not in self._stubs: + self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs["update_worker_pool"] + return self._stubs['update_worker_pool'] @property - def list_worker_pools( - self, - ) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], cloudbuild.ListWorkerPoolsResponse - ]: + def list_worker_pools(self) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], + cloudbuild.ListWorkerPoolsResponse]: r"""Return a callable for the list worker pools method over gRPC. List project's ``WorkerPools``. @@ -699,13 +686,15 @@ def list_worker_pools( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "list_worker_pools" not in self._stubs: - self._stubs["list_worker_pools"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools", + if 'list_worker_pools' not in self._stubs: + self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, ) - return self._stubs["list_worker_pools"] + return self._stubs['list_worker_pools'] -__all__ = ("CloudBuildGrpcTransport",) +__all__ = ( + 'CloudBuildGrpcTransport', +) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py index 817e30a5..98100d53 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -17,12 +17,12 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.devtools.cloudbuild_v1.types import cloudbuild @@ -57,15 +57,13 @@ class CloudBuildGrpcAsyncIOTransport(CloudBuildTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel( - cls, - host: str = "cloudbuild.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> aio.Channel: + def create_channel(cls, + host: str = 'cloudbuild.googleapis.com', + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: address (Optional[str]): The host for the channel to use. @@ -94,21 +92,19 @@ def create_channel( credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, - **kwargs, + **kwargs ) - def __init__( - self, - *, - host: str = "cloudbuild.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - ) -> None: + def __init__(self, *, + host: str = 'cloudbuild.googleapis.com', + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + ) -> None: """Instantiate the transport. Args: @@ -152,11 +148,7 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" # Create SSL credentials with client_cert_source or application # default SSL credentials. @@ -198,9 +190,10 @@ def grpc_channel(self) -> aio.Channel: """ # Sanity check: Only create a new channel if we do not already # have one. - if not hasattr(self, "_grpc_channel"): + if not hasattr(self, '_grpc_channel'): self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, + self._host, + credentials=self._credentials, ) # Return the channel from cache. @@ -214,18 +207,18 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient( + if 'operations_client' not in self.__dict__: + self.__dict__['operations_client'] = operations_v1.OperationsAsyncClient( self.grpc_channel ) # Return the client from cache. - return self.__dict__["operations_client"] + return self.__dict__['operations_client'] @property - def create_build( - self, - ) -> Callable[[cloudbuild.CreateBuildRequest], Awaitable[operations.Operation]]: + def create_build(self) -> Callable[ + [cloudbuild.CreateBuildRequest], + Awaitable[operations.Operation]]: r"""Return a callable for the create build method over gRPC. Starts a build with the specified configuration. @@ -244,18 +237,18 @@ def create_build( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_build" not in self._stubs: - self._stubs["create_build"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild", + if 'create_build' not in self._stubs: + self._stubs['create_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', request_serializer=cloudbuild.CreateBuildRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs["create_build"] + return self._stubs['create_build'] @property - def get_build( - self, - ) -> Callable[[cloudbuild.GetBuildRequest], Awaitable[cloudbuild.Build]]: + def get_build(self) -> Callable[ + [cloudbuild.GetBuildRequest], + Awaitable[cloudbuild.Build]]: r"""Return a callable for the get build method over gRPC. Returns information about a previously requested build. @@ -274,20 +267,18 @@ def get_build( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
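
Taken together, the pieces above let a caller supply a pre-built AsyncIO channel: create it with the classmethod, pass it to the transport, and pass the transport to the async client. A sketch under those assumptions (anonymous credentials, default endpoint; run inside an event loop in practice):

    from google.auth import credentials as ga_credentials
    from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildAsyncClient
    from google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.grpc_asyncio import (
        CloudBuildGrpcAsyncIOTransport,
    )

    creds = ga_credentials.AnonymousCredentials()
    channel = CloudBuildGrpcAsyncIOTransport.create_channel(
        "cloudbuild.googleapis.com", credentials=creds,
    )
    # An explicitly provided channel short-circuits the mTLS logic above.
    transport = CloudBuildGrpcAsyncIOTransport(channel=channel, credentials=creds)
    client = CloudBuildAsyncClient(transport=transport)
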
- if "get_build" not in self._stubs: - self._stubs["get_build"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/GetBuild", + if 'get_build' not in self._stubs: + self._stubs['get_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', request_serializer=cloudbuild.GetBuildRequest.serialize, response_deserializer=cloudbuild.Build.deserialize, ) - return self._stubs["get_build"] + return self._stubs['get_build'] @property - def list_builds( - self, - ) -> Callable[ - [cloudbuild.ListBuildsRequest], Awaitable[cloudbuild.ListBuildsResponse] - ]: + def list_builds(self) -> Callable[ + [cloudbuild.ListBuildsRequest], + Awaitable[cloudbuild.ListBuildsResponse]]: r"""Return a callable for the list builds method over gRPC. Lists previously requested builds. @@ -304,18 +295,18 @@ def list_builds( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_builds" not in self._stubs: - self._stubs["list_builds"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds", + if 'list_builds' not in self._stubs: + self._stubs['list_builds'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', request_serializer=cloudbuild.ListBuildsRequest.serialize, response_deserializer=cloudbuild.ListBuildsResponse.deserialize, ) - return self._stubs["list_builds"] + return self._stubs['list_builds'] @property - def cancel_build( - self, - ) -> Callable[[cloudbuild.CancelBuildRequest], Awaitable[cloudbuild.Build]]: + def cancel_build(self) -> Callable[ + [cloudbuild.CancelBuildRequest], + Awaitable[cloudbuild.Build]]: r"""Return a callable for the cancel build method over gRPC. Cancels a build in progress. @@ -330,18 +321,18 @@ def cancel_build( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "cancel_build" not in self._stubs: - self._stubs["cancel_build"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild", + if 'cancel_build' not in self._stubs: + self._stubs['cancel_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', request_serializer=cloudbuild.CancelBuildRequest.serialize, response_deserializer=cloudbuild.Build.deserialize, ) - return self._stubs["cancel_build"] + return self._stubs['cancel_build'] @property - def retry_build( - self, - ) -> Callable[[cloudbuild.RetryBuildRequest], Awaitable[operations.Operation]]: + def retry_build(self) -> Callable[ + [cloudbuild.RetryBuildRequest], + Awaitable[operations.Operation]]: r"""Return a callable for the retry build method over gRPC. Creates a new build based on the specified build. @@ -385,20 +376,18 @@ def retry_build( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "retry_build" not in self._stubs: - self._stubs["retry_build"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild", + if 'retry_build' not in self._stubs: + self._stubs['retry_build'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', request_serializer=cloudbuild.RetryBuildRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs["retry_build"] + return self._stubs['retry_build'] @property - def create_build_trigger( - self, - ) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], Awaitable[cloudbuild.BuildTrigger] - ]: + def create_build_trigger(self) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], + Awaitable[cloudbuild.BuildTrigger]]: r"""Return a callable for the create build trigger method over gRPC. Creates a new ``BuildTrigger``. @@ -415,20 +404,18 @@ def create_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_build_trigger" not in self._stubs: - self._stubs["create_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger", + if 'create_build_trigger' not in self._stubs: + self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs["create_build_trigger"] + return self._stubs['create_build_trigger'] @property - def get_build_trigger( - self, - ) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], Awaitable[cloudbuild.BuildTrigger] - ]: + def get_build_trigger(self) -> Callable[ + [cloudbuild.GetBuildTriggerRequest], + Awaitable[cloudbuild.BuildTrigger]]: r"""Return a callable for the get build trigger method over gRPC. Returns information about a ``BuildTrigger``. @@ -445,21 +432,18 @@ def get_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_build_trigger" not in self._stubs: - self._stubs["get_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger", + if 'get_build_trigger' not in self._stubs: + self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs["get_build_trigger"] + return self._stubs['get_build_trigger'] @property - def list_build_triggers( - self, - ) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - Awaitable[cloudbuild.ListBuildTriggersResponse], - ]: + def list_build_triggers(self) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + Awaitable[cloudbuild.ListBuildTriggersResponse]]: r"""Return a callable for the list build triggers method over gRPC. Lists existing ``BuildTrigger``\ s. @@ -476,18 +460,18 @@ def list_build_triggers( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "list_build_triggers" not in self._stubs: - self._stubs["list_build_triggers"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers", + if 'list_build_triggers' not in self._stubs: + self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, ) - return self._stubs["list_build_triggers"] + return self._stubs['list_build_triggers'] @property - def delete_build_trigger( - self, - ) -> Callable[[cloudbuild.DeleteBuildTriggerRequest], Awaitable[empty.Empty]]: + def delete_build_trigger(self) -> Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + Awaitable[empty.Empty]]: r"""Return a callable for the delete build trigger method over gRPC. Deletes a ``BuildTrigger`` by its project ID and trigger ID. @@ -504,20 +488,18 @@ def delete_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_build_trigger" not in self._stubs: - self._stubs["delete_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger", + if 'delete_build_trigger' not in self._stubs: + self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, response_deserializer=empty.Empty.FromString, ) - return self._stubs["delete_build_trigger"] + return self._stubs['delete_build_trigger'] @property - def update_build_trigger( - self, - ) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], Awaitable[cloudbuild.BuildTrigger] - ]: + def update_build_trigger(self) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + Awaitable[cloudbuild.BuildTrigger]]: r"""Return a callable for the update build trigger method over gRPC. Updates a ``BuildTrigger`` by its project ID and trigger ID. @@ -534,18 +516,18 @@ def update_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "update_build_trigger" not in self._stubs: - self._stubs["update_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger", + if 'update_build_trigger' not in self._stubs: + self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs["update_build_trigger"] + return self._stubs['update_build_trigger'] @property - def run_build_trigger( - self, - ) -> Callable[[cloudbuild.RunBuildTriggerRequest], Awaitable[operations.Operation]]: + def run_build_trigger(self) -> Callable[ + [cloudbuild.RunBuildTriggerRequest], + Awaitable[operations.Operation]]: r"""Return a callable for the run build trigger method over gRPC. Runs a ``BuildTrigger`` at a particular source revision. @@ -560,20 +542,18 @@ def run_build_trigger( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "run_build_trigger" not in self._stubs: - self._stubs["run_build_trigger"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger", + if 'run_build_trigger' not in self._stubs: + self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs["run_build_trigger"] + return self._stubs['run_build_trigger'] @property - def create_worker_pool( - self, - ) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], Awaitable[cloudbuild.WorkerPool] - ]: + def create_worker_pool(self) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], + Awaitable[cloudbuild.WorkerPool]]: r"""Return a callable for the create worker pool method over gRPC. Creates a ``WorkerPool`` to run the builds, and returns the new @@ -591,18 +571,18 @@ def create_worker_pool( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "create_worker_pool" not in self._stubs: - self._stubs["create_worker_pool"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool", + if 'create_worker_pool' not in self._stubs: + self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs["create_worker_pool"] + return self._stubs['create_worker_pool'] @property - def get_worker_pool( - self, - ) -> Callable[[cloudbuild.GetWorkerPoolRequest], Awaitable[cloudbuild.WorkerPool]]: + def get_worker_pool(self) -> Callable[ + [cloudbuild.GetWorkerPoolRequest], + Awaitable[cloudbuild.WorkerPool]]: r"""Return a callable for the get worker pool method over gRPC. Returns information about a ``WorkerPool``. @@ -619,18 +599,18 @@ def get_worker_pool( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "get_worker_pool" not in self._stubs: - self._stubs["get_worker_pool"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool", + if 'get_worker_pool' not in self._stubs: + self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs["get_worker_pool"] + return self._stubs['get_worker_pool'] @property - def delete_worker_pool( - self, - ) -> Callable[[cloudbuild.DeleteWorkerPoolRequest], Awaitable[empty.Empty]]: + def delete_worker_pool(self) -> Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + Awaitable[empty.Empty]]: r"""Return a callable for the delete worker pool method over gRPC. Deletes a ``WorkerPool`` by its project ID and WorkerPool name. @@ -647,20 +627,18 @@ def delete_worker_pool( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "delete_worker_pool" not in self._stubs: - self._stubs["delete_worker_pool"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool", + if 'delete_worker_pool' not in self._stubs: + self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, response_deserializer=empty.Empty.FromString, ) - return self._stubs["delete_worker_pool"] + return self._stubs['delete_worker_pool'] @property - def update_worker_pool( - self, - ) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], Awaitable[cloudbuild.WorkerPool] - ]: + def update_worker_pool(self) -> Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + Awaitable[cloudbuild.WorkerPool]]: r"""Return a callable for the update worker pool method over gRPC. Update a ``WorkerPool``. @@ -677,21 +655,18 @@ def update_worker_pool( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "update_worker_pool" not in self._stubs: - self._stubs["update_worker_pool"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool", + if 'update_worker_pool' not in self._stubs: + self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs["update_worker_pool"] + return self._stubs['update_worker_pool'] @property - def list_worker_pools( - self, - ) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - Awaitable[cloudbuild.ListWorkerPoolsResponse], - ]: + def list_worker_pools(self) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], + Awaitable[cloudbuild.ListWorkerPoolsResponse]]: r"""Return a callable for the list worker pools method over gRPC. List project's ``WorkerPools``. @@ -708,13 +683,15 @@ def list_worker_pools( # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "list_worker_pools" not in self._stubs: - self._stubs["list_worker_pools"] = self.grpc_channel.unary_unary( - "/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools", + if 'list_worker_pools' not in self._stubs: + self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( + '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, ) - return self._stubs["list_worker_pools"] + return self._stubs['list_worker_pools'] -__all__ = ("CloudBuildGrpcAsyncIOTransport",) +__all__ = ( + 'CloudBuildGrpcAsyncIOTransport', +) diff --git a/google/cloud/devtools/cloudbuild_v1/types/__init__.py b/google/cloud/devtools/cloudbuild_v1/types/__init__.py index c9fc9fed..39596371 100644 --- a/google/cloud/devtools/cloudbuild_v1/types/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/types/__init__.py @@ -15,95 +15,51 @@ # limitations under the License. 
# -from .cloudbuild import ( - RetryBuildRequest, - RunBuildTriggerRequest, - StorageSource, - RepoSource, - Source, - BuiltImage, - BuildStep, - Volume, - Results, - ArtifactResult, - Build, - Artifacts, - TimeSpan, - BuildOperationMetadata, - SourceProvenance, - FileHashes, - Hash, - Secret, - CreateBuildRequest, - GetBuildRequest, - ListBuildsRequest, - ListBuildsResponse, - CancelBuildRequest, - BuildTrigger, - GitHubEventsConfig, - PullRequestFilter, - PushFilter, - CreateBuildTriggerRequest, - GetBuildTriggerRequest, - ListBuildTriggersRequest, - ListBuildTriggersResponse, - DeleteBuildTriggerRequest, - UpdateBuildTriggerRequest, - BuildOptions, - WorkerPool, - WorkerConfig, - Network, - CreateWorkerPoolRequest, - GetWorkerPoolRequest, - DeleteWorkerPoolRequest, - UpdateWorkerPoolRequest, - ListWorkerPoolsRequest, - ListWorkerPoolsResponse, -) +from .cloudbuild import (RetryBuildRequest, RunBuildTriggerRequest, StorageSource, RepoSource, Source, BuiltImage, BuildStep, Volume, Results, ArtifactResult, Build, Artifacts, TimeSpan, BuildOperationMetadata, SourceProvenance, FileHashes, Hash, Secret, CreateBuildRequest, GetBuildRequest, ListBuildsRequest, ListBuildsResponse, CancelBuildRequest, BuildTrigger, GitHubEventsConfig, PullRequestFilter, PushFilter, CreateBuildTriggerRequest, GetBuildTriggerRequest, ListBuildTriggersRequest, ListBuildTriggersResponse, DeleteBuildTriggerRequest, UpdateBuildTriggerRequest, BuildOptions, WorkerPool, WorkerConfig, Network, CreateWorkerPoolRequest, GetWorkerPoolRequest, DeleteWorkerPoolRequest, UpdateWorkerPoolRequest, ListWorkerPoolsRequest, ListWorkerPoolsResponse, ) __all__ = ( - "RetryBuildRequest", - "RunBuildTriggerRequest", - "StorageSource", - "RepoSource", - "Source", - "BuiltImage", - "BuildStep", - "Volume", - "Results", - "ArtifactResult", - "Build", - "Artifacts", - "TimeSpan", - "BuildOperationMetadata", - "SourceProvenance", - "FileHashes", - "Hash", - "Secret", - "CreateBuildRequest", - "GetBuildRequest", - "ListBuildsRequest", - "ListBuildsResponse", - "CancelBuildRequest", - "BuildTrigger", - "GitHubEventsConfig", - "PullRequestFilter", - "PushFilter", - "CreateBuildTriggerRequest", - "GetBuildTriggerRequest", - "ListBuildTriggersRequest", - "ListBuildTriggersResponse", - "DeleteBuildTriggerRequest", - "UpdateBuildTriggerRequest", - "BuildOptions", - "WorkerPool", - "WorkerConfig", - "Network", - "CreateWorkerPoolRequest", - "GetWorkerPoolRequest", - "DeleteWorkerPoolRequest", - "UpdateWorkerPoolRequest", - "ListWorkerPoolsRequest", - "ListWorkerPoolsResponse", + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'StorageSource', + 'RepoSource', + 'Source', + 'BuiltImage', + 'BuildStep', + 'Volume', + 'Results', + 'ArtifactResult', + 'Build', + 'Artifacts', + 'TimeSpan', + 'BuildOperationMetadata', + 'SourceProvenance', + 'FileHashes', + 'Hash', + 'Secret', + 'CreateBuildRequest', + 'GetBuildRequest', + 'ListBuildsRequest', + 'ListBuildsResponse', + 'CancelBuildRequest', + 'BuildTrigger', + 'GitHubEventsConfig', + 'PullRequestFilter', + 'PushFilter', + 'CreateBuildTriggerRequest', + 'GetBuildTriggerRequest', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'DeleteBuildTriggerRequest', + 'UpdateBuildTriggerRequest', + 'BuildOptions', + 'WorkerPool', + 'WorkerConfig', + 'Network', + 'CreateWorkerPoolRequest', + 'GetWorkerPoolRequest', + 'DeleteWorkerPoolRequest', + 'UpdateWorkerPoolRequest', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', ) diff --git a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py 
b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index b7f5dbbd..d76531e4 100644 --- a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -23,51 +23,51 @@ __protobuf__ = proto.module( - package="google.devtools.cloudbuild.v1", + package='google.devtools.cloudbuild.v1', manifest={ - "RetryBuildRequest", - "RunBuildTriggerRequest", - "StorageSource", - "RepoSource", - "Source", - "BuiltImage", - "BuildStep", - "Volume", - "Results", - "ArtifactResult", - "Build", - "Artifacts", - "TimeSpan", - "BuildOperationMetadata", - "SourceProvenance", - "FileHashes", - "Hash", - "Secret", - "CreateBuildRequest", - "GetBuildRequest", - "ListBuildsRequest", - "ListBuildsResponse", - "CancelBuildRequest", - "BuildTrigger", - "GitHubEventsConfig", - "PullRequestFilter", - "PushFilter", - "CreateBuildTriggerRequest", - "GetBuildTriggerRequest", - "ListBuildTriggersRequest", - "ListBuildTriggersResponse", - "DeleteBuildTriggerRequest", - "UpdateBuildTriggerRequest", - "BuildOptions", - "WorkerPool", - "WorkerConfig", - "Network", - "CreateWorkerPoolRequest", - "GetWorkerPoolRequest", - "DeleteWorkerPoolRequest", - "UpdateWorkerPoolRequest", - "ListWorkerPoolsRequest", - "ListWorkerPoolsResponse", + 'RetryBuildRequest', + 'RunBuildTriggerRequest', + 'StorageSource', + 'RepoSource', + 'Source', + 'BuiltImage', + 'BuildStep', + 'Volume', + 'Results', + 'ArtifactResult', + 'Build', + 'Artifacts', + 'TimeSpan', + 'BuildOperationMetadata', + 'SourceProvenance', + 'FileHashes', + 'Hash', + 'Secret', + 'CreateBuildRequest', + 'GetBuildRequest', + 'ListBuildsRequest', + 'ListBuildsResponse', + 'CancelBuildRequest', + 'BuildTrigger', + 'GitHubEventsConfig', + 'PullRequestFilter', + 'PushFilter', + 'CreateBuildTriggerRequest', + 'GetBuildTriggerRequest', + 'ListBuildTriggersRequest', + 'ListBuildTriggersResponse', + 'DeleteBuildTriggerRequest', + 'UpdateBuildTriggerRequest', + 'BuildOptions', + 'WorkerPool', + 'WorkerConfig', + 'Network', + 'CreateWorkerPoolRequest', + 'GetWorkerPoolRequest', + 'DeleteWorkerPoolRequest', + 'UpdateWorkerPoolRequest', + 'ListWorkerPoolsRequest', + 'ListWorkerPoolsResponse', }, ) @@ -104,7 +104,9 @@ class RunBuildTriggerRequest(proto.Message): trigger_id = proto.Field(proto.STRING, number=2) - source = proto.Field(proto.MESSAGE, number=3, message="RepoSource",) + source = proto.Field(proto.MESSAGE, number=3, + message='RepoSource', + ) class StorageSource(proto.Message): @@ -176,11 +178,11 @@ class RepoSource(proto.Message): repo_name = proto.Field(proto.STRING, number=2) - branch_name = proto.Field(proto.STRING, number=3, oneof="revision") + branch_name = proto.Field(proto.STRING, number=3, oneof='revision') - tag_name = proto.Field(proto.STRING, number=4, oneof="revision") + tag_name = proto.Field(proto.STRING, number=4, oneof='revision') - commit_sha = proto.Field(proto.STRING, number=5, oneof="revision") + commit_sha = proto.Field(proto.STRING, number=5, oneof='revision') dir = proto.Field(proto.STRING, number=7) @@ -201,12 +203,12 @@ class Source(proto.Message): location in a Cloud Source Repository. 
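
A note on the ``oneof`` arguments appearing in these fields: proto-plus enforces oneof groups at runtime, so assigning one member clears the others. For the ``revision`` group on ``RepoSource``:

    from google.cloud.devtools.cloudbuild_v1.types import cloudbuild

    src = cloudbuild.RepoSource(repo_name="my-repo", branch_name="master")
    src.commit_sha = "abc123"      # switches the active member of `revision`
    assert src.branch_name == ""   # branch_name was cleared by the assignment
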
""" - storage_source = proto.Field( - proto.MESSAGE, number=2, oneof="source", message=StorageSource, + storage_source = proto.Field(proto.MESSAGE, number=2, oneof='source', + message=StorageSource, ) - repo_source = proto.Field( - proto.MESSAGE, number=3, oneof="source", message=RepoSource, + repo_source = proto.Field(proto.MESSAGE, number=3, oneof='source', + message=RepoSource, ) @@ -228,7 +230,9 @@ class BuiltImage(proto.Message): digest = proto.Field(proto.STRING, number=3) - push_timing = proto.Field(proto.MESSAGE, number=4, message="TimeSpan",) + push_timing = proto.Field(proto.MESSAGE, number=4, + message='TimeSpan', + ) class BuildStep(proto.Message): @@ -343,15 +347,25 @@ class BuildStep(proto.Message): secret_env = proto.RepeatedField(proto.STRING, number=8) - volumes = proto.RepeatedField(proto.MESSAGE, number=9, message="Volume",) + volumes = proto.RepeatedField(proto.MESSAGE, number=9, + message='Volume', + ) - timing = proto.Field(proto.MESSAGE, number=10, message="TimeSpan",) + timing = proto.Field(proto.MESSAGE, number=10, + message='TimeSpan', + ) - pull_timing = proto.Field(proto.MESSAGE, number=13, message="TimeSpan",) + pull_timing = proto.Field(proto.MESSAGE, number=13, + message='TimeSpan', + ) - timeout = proto.Field(proto.MESSAGE, number=11, message=duration.Duration,) + timeout = proto.Field(proto.MESSAGE, number=11, + message=duration.Duration, + ) - status = proto.Field(proto.ENUM, number=12, enum="Build.Status",) + status = proto.Field(proto.ENUM, number=12, + enum='Build.Status', + ) class Volume(proto.Message): @@ -407,7 +421,9 @@ class Results(proto.Message): Time to push all non-container artifacts. """ - images = proto.RepeatedField(proto.MESSAGE, number=2, message=BuiltImage,) + images = proto.RepeatedField(proto.MESSAGE, number=2, + message=BuiltImage, + ) build_step_images = proto.RepeatedField(proto.STRING, number=3) @@ -417,7 +433,9 @@ class Results(proto.Message): build_step_outputs = proto.RepeatedField(proto.BYTES, number=6) - artifact_timing = proto.Field(proto.MESSAGE, number=7, message="TimeSpan",) + artifact_timing = proto.Field(proto.MESSAGE, number=7, + message='TimeSpan', + ) class ArtifactResult(proto.Message): @@ -435,7 +453,9 @@ class ArtifactResult(proto.Message): location = proto.Field(proto.STRING, number=1) - file_hash = proto.RepeatedField(proto.MESSAGE, number=2, message="FileHashes",) + file_hash = proto.RepeatedField(proto.MESSAGE, number=2, + message='FileHashes', + ) class Build(proto.Message): @@ -551,7 +571,6 @@ class Build(proto.Message): If the build does not specify source or images, these keys will not be included. 
""" - class Status(proto.Enum): r"""Possible status of a build or build step.""" STATUS_UNKNOWN = 0 @@ -568,39 +587,61 @@ class Status(proto.Enum): project_id = proto.Field(proto.STRING, number=16) - status = proto.Field(proto.ENUM, number=2, enum=Status,) + status = proto.Field(proto.ENUM, number=2, + enum=Status, + ) status_detail = proto.Field(proto.STRING, number=24) - source = proto.Field(proto.MESSAGE, number=3, message=Source,) + source = proto.Field(proto.MESSAGE, number=3, + message=Source, + ) - steps = proto.RepeatedField(proto.MESSAGE, number=11, message=BuildStep,) + steps = proto.RepeatedField(proto.MESSAGE, number=11, + message=BuildStep, + ) - results = proto.Field(proto.MESSAGE, number=10, message=Results,) + results = proto.Field(proto.MESSAGE, number=10, + message=Results, + ) - create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + create_time = proto.Field(proto.MESSAGE, number=6, + message=timestamp.Timestamp, + ) - start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + start_time = proto.Field(proto.MESSAGE, number=7, + message=timestamp.Timestamp, + ) - finish_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) + finish_time = proto.Field(proto.MESSAGE, number=8, + message=timestamp.Timestamp, + ) - timeout = proto.Field(proto.MESSAGE, number=12, message=duration.Duration,) + timeout = proto.Field(proto.MESSAGE, number=12, + message=duration.Duration, + ) images = proto.RepeatedField(proto.STRING, number=13) - queue_ttl = proto.Field(proto.MESSAGE, number=40, message=duration.Duration,) + queue_ttl = proto.Field(proto.MESSAGE, number=40, + message=duration.Duration, + ) - artifacts = proto.Field(proto.MESSAGE, number=37, message="Artifacts",) + artifacts = proto.Field(proto.MESSAGE, number=37, + message='Artifacts', + ) logs_bucket = proto.Field(proto.STRING, number=19) - source_provenance = proto.Field( - proto.MESSAGE, number=21, message="SourceProvenance", + source_provenance = proto.Field(proto.MESSAGE, number=21, + message='SourceProvenance', ) build_trigger_id = proto.Field(proto.STRING, number=22) - options = proto.Field(proto.MESSAGE, number=23, message="BuildOptions",) + options = proto.Field(proto.MESSAGE, number=23, + message='BuildOptions', + ) log_url = proto.Field(proto.STRING, number=25) @@ -608,9 +649,13 @@ class Status(proto.Enum): tags = proto.RepeatedField(proto.STRING, number=31) - secrets = proto.RepeatedField(proto.MESSAGE, number=32, message="Secret",) + secrets = proto.RepeatedField(proto.MESSAGE, number=32, + message='Secret', + ) - timing = proto.MapField(proto.STRING, proto.MESSAGE, number=33, message="TimeSpan",) + timing = proto.MapField(proto.STRING, proto.MESSAGE, number=33, + message='TimeSpan', + ) class Artifacts(proto.Message): @@ -645,7 +690,6 @@ class Artifacts(proto.Message): If any objects fail to be pushed, the build is marked FAILURE. """ - class ArtifactObjects(proto.Message): r"""Files in the workspace to upload to Cloud Storage upon successful completion of all build steps. 
@@ -670,11 +714,15 @@ class ArtifactObjects(proto.Message): paths = proto.RepeatedField(proto.STRING, number=2) - timing = proto.Field(proto.MESSAGE, number=3, message="TimeSpan",) + timing = proto.Field(proto.MESSAGE, number=3, + message='TimeSpan', + ) images = proto.RepeatedField(proto.STRING, number=1) - objects = proto.Field(proto.MESSAGE, number=2, message=ArtifactObjects,) + objects = proto.Field(proto.MESSAGE, number=2, + message=ArtifactObjects, + ) class TimeSpan(proto.Message): @@ -687,9 +735,13 @@ class TimeSpan(proto.Message): End of time span. """ - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + start_time = proto.Field(proto.MESSAGE, number=1, + message=timestamp.Timestamp, + ) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, + message=timestamp.Timestamp, + ) class BuildOperationMetadata(proto.Message): @@ -700,7 +752,9 @@ class BuildOperationMetadata(proto.Message): The build that the operation is tracking. """ - build = proto.Field(proto.MESSAGE, number=1, message=Build,) + build = proto.Field(proto.MESSAGE, number=1, + message=Build, + ) class SourceProvenance(proto.Message): @@ -729,14 +783,16 @@ class SourceProvenance(proto.Message): the single path to that file. """ - resolved_storage_source = proto.Field( - proto.MESSAGE, number=3, message=StorageSource, + resolved_storage_source = proto.Field(proto.MESSAGE, number=3, + message=StorageSource, ) - resolved_repo_source = proto.Field(proto.MESSAGE, number=6, message=RepoSource,) + resolved_repo_source = proto.Field(proto.MESSAGE, number=6, + message=RepoSource, + ) - file_hashes = proto.MapField( - proto.STRING, proto.MESSAGE, number=4, message="FileHashes", + file_hashes = proto.MapField(proto.STRING, proto.MESSAGE, number=4, + message='FileHashes', ) @@ -750,7 +806,9 @@ class FileHashes(proto.Message): Collection of file hashes. """ - file_hash = proto.RepeatedField(proto.MESSAGE, number=1, message="Hash",) + file_hash = proto.RepeatedField(proto.MESSAGE, number=1, + message='Hash', + ) class Hash(proto.Message): @@ -762,14 +820,15 @@ class Hash(proto.Message): value (bytes): The hash value. 
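
These provenance types compose: ``SourceProvenance.file_hashes`` maps a path to ``FileHashes``, whose ``file_hash`` list holds ``Hash`` entries of the declared type. Reading a SHA-256 back out might look like:

    from google.cloud.devtools.cloudbuild_v1.types import cloudbuild

    def sha256_of(provenance: cloudbuild.SourceProvenance, path: str) -> bytes:
        # Scan the hashes recorded for `path`; empty bytes if none is SHA-256.
        for file_hash in provenance.file_hashes[path].file_hash:
            if file_hash.type == cloudbuild.Hash.HashType.SHA256:
                return file_hash.value
        return b""
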
""" - class HashType(proto.Enum): r"""Specifies the hash algorithm, if any.""" NONE = 0 SHA256 = 1 MD5 = 2 - type = proto.Field(proto.ENUM, number=1, enum=HashType,) + type = proto.Field(proto.ENUM, number=1, + enum=HashType, + ) value = proto.Field(proto.BYTES, number=2) @@ -810,7 +869,9 @@ class CreateBuildRequest(proto.Message): project_id = proto.Field(proto.STRING, number=1) - build = proto.Field(proto.MESSAGE, number=2, message=Build,) + build = proto.Field(proto.MESSAGE, number=2, + message=Build, + ) class GetBuildRequest(proto.Message): @@ -866,7 +927,9 @@ class ListBuildsResponse(proto.Message): def raw_page(self): return self - builds = proto.RepeatedField(proto.MESSAGE, number=1, message=Build,) + builds = proto.RepeatedField(proto.MESSAGE, number=1, + message=Build, + ) next_page_token = proto.Field(proto.STRING, number=2) @@ -967,15 +1030,23 @@ class BuildTrigger(proto.Message): tags = proto.RepeatedField(proto.STRING, number=19) - trigger_template = proto.Field(proto.MESSAGE, number=7, message=RepoSource,) + trigger_template = proto.Field(proto.MESSAGE, number=7, + message=RepoSource, + ) - github = proto.Field(proto.MESSAGE, number=13, message="GitHubEventsConfig",) + github = proto.Field(proto.MESSAGE, number=13, + message='GitHubEventsConfig', + ) - build = proto.Field(proto.MESSAGE, number=4, oneof="build_template", message=Build,) + build = proto.Field(proto.MESSAGE, number=4, oneof='build_template', + message=Build, + ) - filename = proto.Field(proto.STRING, number=8, oneof="build_template") + filename = proto.Field(proto.STRING, number=8, oneof='build_template') - create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + create_time = proto.Field(proto.MESSAGE, number=5, + message=timestamp.Timestamp, + ) disabled = proto.Field(proto.BOOL, number=9) @@ -1018,11 +1089,13 @@ class GitHubEventsConfig(proto.Message): name = proto.Field(proto.STRING, number=7) - pull_request = proto.Field( - proto.MESSAGE, number=4, oneof="event", message="PullRequestFilter", + pull_request = proto.Field(proto.MESSAGE, number=4, oneof='event', + message='PullRequestFilter', ) - push = proto.Field(proto.MESSAGE, number=5, oneof="event", message="PushFilter",) + push = proto.Field(proto.MESSAGE, number=5, oneof='event', + message='PushFilter', + ) class PullRequestFilter(proto.Message): @@ -1042,15 +1115,16 @@ class PullRequestFilter(proto.Message): If true, branches that do NOT match the git_ref will trigger a build. """ - class CommentControl(proto.Enum): r"""Controls behavior of Pull Request comments.""" COMMENTS_DISABLED = 0 COMMENTS_ENABLED = 1 - branch = proto.Field(proto.STRING, number=2, oneof="git_ref") + branch = proto.Field(proto.STRING, number=2, oneof='git_ref') - comment_control = proto.Field(proto.ENUM, number=5, enum=CommentControl,) + comment_control = proto.Field(proto.ENUM, number=5, + enum=CommentControl, + ) invert_regex = proto.Field(proto.BOOL, number=6) @@ -1075,9 +1149,9 @@ class PushFilter(proto.Message): NOT match the git_ref regex. 
""" - branch = proto.Field(proto.STRING, number=2, oneof="git_ref") + branch = proto.Field(proto.STRING, number=2, oneof='git_ref') - tag = proto.Field(proto.STRING, number=3, oneof="git_ref") + tag = proto.Field(proto.STRING, number=3, oneof='git_ref') invert_regex = proto.Field(proto.BOOL, number=4) @@ -1095,7 +1169,9 @@ class CreateBuildTriggerRequest(proto.Message): project_id = proto.Field(proto.STRING, number=1) - trigger = proto.Field(proto.MESSAGE, number=2, message=BuildTrigger,) + trigger = proto.Field(proto.MESSAGE, number=2, + message=BuildTrigger, + ) class GetBuildTriggerRequest(proto.Message): @@ -1151,7 +1227,9 @@ class ListBuildTriggersResponse(proto.Message): def raw_page(self): return self - triggers = proto.RepeatedField(proto.MESSAGE, number=1, message=BuildTrigger,) + triggers = proto.RepeatedField(proto.MESSAGE, number=1, + message=BuildTrigger, + ) next_page_token = proto.Field(proto.STRING, number=2) @@ -1189,7 +1267,9 @@ class UpdateBuildTriggerRequest(proto.Message): trigger_id = proto.Field(proto.STRING, number=2) - trigger = proto.Field(proto.MESSAGE, number=3, message=BuildTrigger,) + trigger = proto.Field(proto.MESSAGE, number=3, + message=BuildTrigger, + ) class BuildOptions(proto.Message): @@ -1254,7 +1334,6 @@ class BuildOptions(proto.Message): step is not valid as it is indicative of a build request with an incorrect configuration. """ - class VerifyOption(proto.Enum): r"""Specifies the manner in which the build should be verified, if at all. @@ -1289,29 +1368,41 @@ class LoggingMode(proto.Enum): LEGACY = 1 GCS_ONLY = 2 - source_provenance_hash = proto.RepeatedField( - proto.ENUM, number=1, enum=Hash.HashType, + source_provenance_hash = proto.RepeatedField(proto.ENUM, number=1, + enum=Hash.HashType, ) - requested_verify_option = proto.Field(proto.ENUM, number=2, enum=VerifyOption,) + requested_verify_option = proto.Field(proto.ENUM, number=2, + enum=VerifyOption, + ) - machine_type = proto.Field(proto.ENUM, number=3, enum=MachineType,) + machine_type = proto.Field(proto.ENUM, number=3, + enum=MachineType, + ) disk_size_gb = proto.Field(proto.INT64, number=6) - substitution_option = proto.Field(proto.ENUM, number=4, enum=SubstitutionOption,) + substitution_option = proto.Field(proto.ENUM, number=4, + enum=SubstitutionOption, + ) - log_streaming_option = proto.Field(proto.ENUM, number=5, enum=LogStreamingOption,) + log_streaming_option = proto.Field(proto.ENUM, number=5, + enum=LogStreamingOption, + ) worker_pool = proto.Field(proto.STRING, number=7) - logging = proto.Field(proto.ENUM, number=11, enum=LoggingMode,) + logging = proto.Field(proto.ENUM, number=11, + enum=LoggingMode, + ) env = proto.RepeatedField(proto.STRING, number=12) secret_env = proto.RepeatedField(proto.STRING, number=13) - volumes = proto.RepeatedField(proto.MESSAGE, number=14, message=Volume,) + volumes = proto.RepeatedField(proto.MESSAGE, number=14, + message=Volume, + ) class WorkerPool(proto.Message): @@ -1358,7 +1449,6 @@ class WorkerPool(proto.Message): status (~.cloudbuild.WorkerPool.Status): Output only. WorkerPool Status. 
""" - class Region(proto.Enum): r"""Supported GCP regions to create the ``WorkerPool``.""" REGION_UNSPECIFIED = 0 @@ -1383,17 +1473,29 @@ class Status(proto.Enum): worker_count = proto.Field(proto.INT64, number=4) - worker_config = proto.Field(proto.MESSAGE, number=16, message="WorkerConfig",) + worker_config = proto.Field(proto.MESSAGE, number=16, + message='WorkerConfig', + ) - regions = proto.RepeatedField(proto.ENUM, number=9, enum=Region,) + regions = proto.RepeatedField(proto.ENUM, number=9, + enum=Region, + ) - create_time = proto.Field(proto.MESSAGE, number=11, message=timestamp.Timestamp,) + create_time = proto.Field(proto.MESSAGE, number=11, + message=timestamp.Timestamp, + ) - update_time = proto.Field(proto.MESSAGE, number=17, message=timestamp.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=17, + message=timestamp.Timestamp, + ) - delete_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,) + delete_time = proto.Field(proto.MESSAGE, number=12, + message=timestamp.Timestamp, + ) - status = proto.Field(proto.ENUM, number=13, enum=Status,) + status = proto.Field(proto.ENUM, number=13, + enum=Status, + ) class WorkerConfig(proto.Message): @@ -1434,7 +1536,9 @@ class WorkerConfig(proto.Message): disk_size_gb = proto.Field(proto.INT64, number=2) - network = proto.Field(proto.MESSAGE, number=3, message="Network",) + network = proto.Field(proto.MESSAGE, number=3, + message='Network', + ) tag = proto.Field(proto.STRING, number=4) @@ -1477,7 +1581,9 @@ class CreateWorkerPoolRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - worker_pool = proto.Field(proto.MESSAGE, number=2, message=WorkerPool,) + worker_pool = proto.Field(proto.MESSAGE, number=2, + message=WorkerPool, + ) class GetWorkerPoolRequest(proto.Message): @@ -1520,7 +1626,9 @@ class UpdateWorkerPoolRequest(proto.Message): name = proto.Field(proto.STRING, number=2) - worker_pool = proto.Field(proto.MESSAGE, number=3, message=WorkerPool,) + worker_pool = proto.Field(proto.MESSAGE, number=3, + message=WorkerPool, + ) class ListWorkerPoolsRequest(proto.Message): @@ -1542,7 +1650,9 @@ class ListWorkerPoolsResponse(proto.Message): ``WorkerPools`` for the project. 
""" - worker_pools = proto.RepeatedField(proto.MESSAGE, number=1, message=WorkerPool,) + worker_pools = proto.RepeatedField(proto.MESSAGE, number=1, + message=WorkerPool, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/synth.metadata b/synth.metadata index ec8cfe7f..71a531e8 100644 --- a/synth.metadata +++ b/synth.metadata @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f221423c5c9d28a199c052eb4962afac4b749ea3" + "sha": "f07cb4446192952f19be3056957f56d180586055" } } ], diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index 44666a8e..0212ebba 100644 --- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -35,9 +35,7 @@ from google.api_core import operations_v1 from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import ( - CloudBuildAsyncClient, -) +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildAsyncClient from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildClient from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports @@ -56,11 +54,7 @@ def client_cert_source_callback(): # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT def test__get_default_mtls_endpoint(): @@ -71,30 +65,17 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert CloudBuildClient._get_default_mtls_endpoint(None) is None - assert ( - CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - ) - assert ( - CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) + assert CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert CloudBuildClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi @pytest.mark.parametrize("client_class", [CloudBuildClient, CloudBuildAsyncClient]) def test_cloud_build_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client._transport._credentials == creds @@ -102,7 +83,7 @@ def test_cloud_build_client_from_service_account_file(client_class): client 
= client_class.from_service_account_json("dummy/file/path.json") assert client._transport._credentials == creds - assert client._transport._host == "cloudbuild.googleapis.com:443" + assert client._transport._host == 'cloudbuild.googleapis.com:443' def test_cloud_build_client_get_transport_class(): @@ -113,42 +94,29 @@ def test_cloud_build_client_get_transport_class(): assert transport == transports.CloudBuildGrpcTransport -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - ), - ], -) -@mock.patch.object( - CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) -) -@mock.patch.object( - CloudBuildAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudBuildAsyncClient), -) -def test_cloud_build_client_client_options( - client_class, transport_class, transport_name -): +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio") +]) +@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) +@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) +def test_cloud_build_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. - with mock.patch.object(CloudBuildClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=credentials.AnonymousCredentials() + ) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudBuildClient, "get_transport_class") as gtc: + with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -164,7 +132,7 @@ def test_cloud_build_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -180,7 +148,7 @@ def test_cloud_build_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "always". 
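
Each of these environment checks reduces to the same skeleton: stub the transport constructor with ``mock.patch.object``, construct a client, then assert on the keyword arguments the client forwarded. Stripped down (the endpoint is a placeholder):

    from unittest import mock

    from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildClient
    from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports

    def test_forwards_endpoint():
        with mock.patch.object(transports.CloudBuildGrpcTransport, "__init__") as patched:
            patched.return_value = None  # __init__ must return None
            CloudBuildClient(client_options={"api_endpoint": "squid.clam.whelk"})
            _, kwargs = patched.call_args
            assert kwargs["host"] == "squid.clam.whelk"
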
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -196,10 +164,8 @@ def test_cloud_build_client_client_options( # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is # "auto", and client_cert_source is provided. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -210,16 +176,14 @@ def test_cloud_build_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, quota_project_id=None, + ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is # "auto", and default_client_cert_source is provided. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -235,11 +199,8 @@ def test_cloud_build_client_client_options( # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is # "auto", but client_cert_source and default_client_cert_source are None. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -260,7 +221,7 @@ def test_cloud_build_client_client_options( # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -274,23 +235,16 @@ def test_cloud_build_client_client_options( ) -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - ), - ], -) -def test_cloud_build_client_client_options_scopes( - client_class, transport_class, transport_name -): +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio") +]) +def test_cloud_build_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) - with mock.patch.object(transport_class, "__init__") as patched: + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -304,23 +258,16 @@ def test_cloud_build_client_client_options_scopes( ) -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - ), - ], -) -def test_cloud_build_client_client_options_credentials_file( - client_class, transport_class, transport_name -): +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), + (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio") +]) +def test_cloud_build_client_client_options_credentials_file(client_class, transport_class, transport_name): # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, "__init__") as patched: + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -335,11 +282,11 @@ def test_cloud_build_client_client_options_credentials_file( def test_cloud_build_client_client_options_from_dict(): - with mock.patch( - "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildGrpcTransport.__init__" - ) as grpc_transport: + with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None - client = CloudBuildClient(client_options={"api_endpoint": "squid.clam.whelk"}) + client = CloudBuildClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, @@ -351,11 +298,10 @@ def test_cloud_build_client_client_options_from_dict(): ) -def test_create_build( - transport: str = "grpc", request_type=cloudbuild.CreateBuildRequest -): +def test_create_build(transport: str = 'grpc', request_type=cloudbuild.CreateBuildRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -363,9 +309,11 @@ def test_create_build( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_build), "__call__") as call: + with mock.patch.object( + type(client._transport.create_build), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") + call.return_value = operations_pb2.Operation(name='operations/spam') response = client.create_build(request) @@ -384,9 +332,10 @@ def test_create_build_from_dict(): @pytest.mark.asyncio -async def test_create_build_async(transport: str = "grpc_asyncio"): +async def test_create_build_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -395,11 +344,11 @@ async def test_create_build_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build), "__call__" - ) as call: + type(client._client._transport.create_build), + '__call__') as call: # Designate an appropriate return value for the call. 
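
# grpc_helpers_async.FakeUnaryUnaryCall, used as the stub's return value in
# the async tests from here on, wraps a plain proto so the mocked call can
# be awaited like a real unary-unary RPC. A minimal stand-in with the same
# observable behavior (a sketch, not the real helper):
import asyncio


class _FakeUnaryUnaryCall:
    """Awaitable resolving immediately to a canned response."""

    def __init__(self, response):
        self._response = response

    def __await__(self):
        yield  # hand control to the event loop once, like a real call
        return self._response


async def _demo():
    assert await _FakeUnaryUnaryCall("operations/spam") == "operations/spam"


asyncio.run(_demo())
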
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + operations_pb2.Operation(name='operations/spam') ) response = await client.create_build(request) @@ -415,17 +364,22 @@ async def test_create_build_async(transport: str = "grpc_asyncio"): def test_create_build_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_build), "__call__") as call: + with mock.patch.object( + type(client._transport.create_build), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = operations_pb2.Operation(name='operations/op') # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_build( - project_id="project_id_value", build=cloudbuild.Build(id="id_value"), + project_id='project_id_value', + build=cloudbuild.Build(id='id_value'), ) # Establish that the underlying call was made with the expected @@ -433,42 +387,47 @@ def test_create_build_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].build == cloudbuild.Build(id="id_value") + assert args[0].build == cloudbuild.Build(id='id_value') def test_create_build_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_build( cloudbuild.CreateBuildRequest(), - project_id="project_id_value", - build=cloudbuild.Build(id="id_value"), + project_id='project_id_value', + build=cloudbuild.Build(id='id_value'), ) @pytest.mark.asyncio async def test_create_build_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build), "__call__" - ) as call: + type(client._client._transport.create_build), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = operations_pb2.Operation(name='operations/op') call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + operations_pb2.Operation(name='operations/spam') ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_build( - project_id="project_id_value", build=cloudbuild.Build(id="id_value"), + project_id='project_id_value', + build=cloudbuild.Build(id='id_value'), ) # Establish that the underlying call was made with the expected @@ -476,28 +435,31 @@ async def test_create_build_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].build == cloudbuild.Build(id="id_value") + assert args[0].build == cloudbuild.Build(id='id_value') @pytest.mark.asyncio async def test_create_build_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_build( cloudbuild.CreateBuildRequest(), - project_id="project_id_value", - build=cloudbuild.Build(id="id_value"), + project_id='project_id_value', + build=cloudbuild.Build(id='id_value'), ) -def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequest): +def test_get_build(transport: str = 'grpc', request_type=cloudbuild.GetBuildRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -505,18 +467,29 @@ def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_build), "__call__") as call: + with mock.patch.object( + type(client._transport.get_build), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build( - id="id_value", - project_id="project_id_value", + id='id_value', + + project_id='project_id_value', + status=cloudbuild.Build.Status.QUEUED, - status_detail="status_detail_value", - images=["images_value"], - logs_bucket="logs_bucket_value", - build_trigger_id="build_trigger_id_value", - log_url="log_url_value", - tags=["tags_value"], + + status_detail='status_detail_value', + + images=['images_value'], + + logs_bucket='logs_bucket_value', + + build_trigger_id='build_trigger_id_value', + + log_url='log_url_value', + + tags=['tags_value'], + ) response = client.get_build(request) @@ -530,23 +503,23 @@ def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequ # Establish that the response is the type that we expect. 
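
# The field checks below lean on proto-plus comparison semantics: repeated
# string fields compare equal to plain Python lists, and enum fields
# compare equal to their enum members. Standalone check, assuming the
# cloudbuild types module this file already imports:
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild

_build = cloudbuild.Build(
    id="id_value",
    tags=["tags_value"],
    status=cloudbuild.Build.Status.QUEUED,
)
assert _build.tags == ["tags_value"]  # repeated field == list
assert _build.status == cloudbuild.Build.Status.QUEUED  # enum member
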
assert isinstance(response, cloudbuild.Build) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' assert response.status == cloudbuild.Build.Status.QUEUED - assert response.status_detail == "status_detail_value" + assert response.status_detail == 'status_detail_value' - assert response.images == ["images_value"] + assert response.images == ['images_value'] - assert response.logs_bucket == "logs_bucket_value" + assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == "build_trigger_id_value" + assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == "log_url_value" + assert response.log_url == 'log_url_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] def test_get_build_from_dict(): @@ -554,9 +527,10 @@ def test_get_build_from_dict(): @pytest.mark.asyncio -async def test_get_build_async(transport: str = "grpc_asyncio"): +async def test_get_build_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -565,22 +539,20 @@ async def test_get_build_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build), "__call__" - ) as call: + type(client._client._transport.get_build), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.Build( - id="id_value", - project_id="project_id_value", - status=cloudbuild.Build.Status.QUEUED, - status_detail="status_detail_value", - images=["images_value"], - logs_bucket="logs_bucket_value", - build_trigger_id="build_trigger_id_value", - log_url="log_url_value", - tags=["tags_value"], - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.QUEUED, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + )) response = await client.get_build(request) @@ -593,37 +565,42 @@ async def test_get_build_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.Build) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' assert response.status == cloudbuild.Build.Status.QUEUED - assert response.status_detail == "status_detail_value" + assert response.status_detail == 'status_detail_value' - assert response.images == ["images_value"] + assert response.images == ['images_value'] - assert response.logs_bucket == "logs_bucket_value" + assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == "build_trigger_id_value" + assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == "log_url_value" + assert response.log_url == 'log_url_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] def test_get_build_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_build), "__call__") as call: + with mock.patch.object( + type(client._transport.get_build), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_build( - project_id="project_id_value", id="id_value", + project_id='project_id_value', + id='id_value', ) # Establish that the underlying call was made with the expected @@ -631,65 +608,77 @@ def test_get_build_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].id == "id_value" + assert args[0].id == 'id_value' def test_get_build_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_build( - cloudbuild.GetBuildRequest(), project_id="project_id_value", id="id_value", + cloudbuild.GetBuildRequest(), + project_id='project_id_value', + id='id_value', ) @pytest.mark.asyncio async def test_get_build_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build), "__call__" - ) as call: + type(client._client._transport.get_build), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_build(project_id="project_id_value", id="id_value",) + response = await client.get_build( + project_id='project_id_value', + id='id_value', + ) # Establish that the underlying call was made with the expected # request object values. 
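
# How the "underlying call" checks below recover the request: each entry of
# mock_calls unpacks to a (name, args, kwargs) triple, and the gRPC stub
# receives the request proto as its first positional argument. Standalone
# illustration with a bare Mock:
from unittest import mock

_stub = mock.Mock()
_stub("fake-request", metadata=())
_name, _args, _kwargs = _stub.mock_calls[0]
assert _args[0] == "fake-request"
assert _kwargs["metadata"] == ()
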
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].id == "id_value" + assert args[0].id == 'id_value' @pytest.mark.asyncio async def test_get_build_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_build( - cloudbuild.GetBuildRequest(), project_id="project_id_value", id="id_value", + cloudbuild.GetBuildRequest(), + project_id='project_id_value', + id='id_value', ) -def test_list_builds( - transport: str = "grpc", request_type=cloudbuild.ListBuildsRequest -): +def test_list_builds(transport: str = 'grpc', request_type=cloudbuild.ListBuildsRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -697,10 +686,13 @@ def test_list_builds( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object( + type(client._transport.list_builds), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse( - next_page_token="next_page_token_value", + next_page_token='next_page_token_value', + ) response = client.list_builds(request) @@ -714,7 +706,7 @@ def test_list_builds( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildsPager) - assert response.next_page_token == "next_page_token_value" + assert response.next_page_token == 'next_page_token_value' def test_list_builds_from_dict(): @@ -722,9 +714,10 @@ def test_list_builds_from_dict(): @pytest.mark.asyncio -async def test_list_builds_async(transport: str = "grpc_asyncio"): +async def test_list_builds_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -733,12 +726,12 @@ async def test_list_builds_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), "__call__" - ) as call: + type(client._client._transport.list_builds), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListBuildsResponse(next_page_token="next_page_token_value",) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse( + next_page_token='next_page_token_value', + )) response = await client.list_builds(request) @@ -751,21 +744,26 @@ async def test_list_builds_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBuildsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response.next_page_token == 'next_page_token_value' def test_list_builds_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object( + type(client._transport.list_builds), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_builds( - project_id="project_id_value", filter="filter_value", + project_id='project_id_value', + filter='filter_value', ) # Establish that the underlying call was made with the expected @@ -773,42 +771,45 @@ def test_list_builds_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].filter == "filter_value" + assert args[0].filter == 'filter_value' def test_list_builds_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_builds( cloudbuild.ListBuildsRequest(), - project_id="project_id_value", - filter="filter_value", + project_id='project_id_value', + filter='filter_value', ) @pytest.mark.asyncio async def test_list_builds_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), "__call__" - ) as call: + type(client._client._transport.list_builds), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListBuildsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
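
# One quirk in the async flattened tests above: call.return_value is
# assigned twice in a row, first the bare proto and then the
# FakeUnaryUnaryCall wrapper. Mock keeps a single return_value slot, so
# only the last assignment is ever observed:
from unittest import mock

_call = mock.Mock()
_call.return_value = "plain"
_call.return_value = "wrapped"
assert _call() == "wrapped"  # the first assignment is inert residue
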
response = await client.list_builds( - project_id="project_id_value", filter="filter_value", + project_id='project_id_value', + filter='filter_value', ) # Establish that the underlying call was made with the expected @@ -816,42 +817,61 @@ async def test_list_builds_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].filter == "filter_value" + assert args[0].filter == 'filter_value' @pytest.mark.asyncio async def test_list_builds_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_builds( cloudbuild.ListBuildsRequest(), - project_id="project_id_value", - filter="filter_value", + project_id='project_id_value', + filter='filter_value', ) def test_list_builds_pager(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object( + type(client._transport.list_builds), + '__call__') as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(), cloudbuild.Build(), cloudbuild.Build(),], - next_page_token="abc", + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token='abc', ), - cloudbuild.ListBuildsResponse(builds=[], next_page_token="def",), cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(),], next_page_token="ghi", + builds=[], + next_page_token='def', ), cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(), cloudbuild.Build(),], + builds=[ + cloudbuild.Build(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], ), RuntimeError, ) @@ -863,106 +883,147 @@ def test_list_builds_pager(): results = [i for i in pager] assert len(results) == 6 - assert all(isinstance(i, cloudbuild.Build) for i in results) - + assert all(isinstance(i, cloudbuild.Build) + for i in results) def test_list_builds_pages(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object( + type(client._transport.list_builds), + '__call__') as call: # Set the response to a series of pages. 
call.side_effect = ( cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(), cloudbuild.Build(), cloudbuild.Build(),], - next_page_token="abc", + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildsResponse( + builds=[], + next_page_token='def', ), - cloudbuild.ListBuildsResponse(builds=[], next_page_token="def",), cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(),], next_page_token="ghi", + builds=[ + cloudbuild.Build(), + ], + next_page_token='ghi', ), cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(), cloudbuild.Build(),], + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], ), RuntimeError, ) pages = list(client.list_builds(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): + for page, token in zip(pages, ['abc','def','ghi', '']): assert page.raw_page.next_page_token == token - @pytest.mark.asyncio async def test_list_builds_async_pager(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - "__call__", - new_callable=mock.AsyncMock, - ) as call: + type(client._client._transport.list_builds), + '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(), cloudbuild.Build(), cloudbuild.Build(),], - next_page_token="abc", + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token='abc', + ), + cloudbuild.ListBuildsResponse( + builds=[], + next_page_token='def', ), - cloudbuild.ListBuildsResponse(builds=[], next_page_token="def",), cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(),], next_page_token="ghi", + builds=[ + cloudbuild.Build(), + ], + next_page_token='ghi', ), cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(), cloudbuild.Build(),], + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], ), RuntimeError, ) async_pager = await client.list_builds(request={},) - assert async_pager.next_page_token == "abc" + assert async_pager.next_page_token == 'abc' responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.Build) for i in responses) - + assert all(isinstance(i, cloudbuild.Build) + for i in responses) @pytest.mark.asyncio async def test_list_builds_async_pages(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - "__call__", - new_callable=mock.AsyncMock, - ) as call: + type(client._client._transport.list_builds), + '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. 
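
# "A series of pages" works through Mock's side_effect: each invocation of
# the stub pops the next ListBuildsResponse, an empty next_page_token ends
# the iteration, and the trailing RuntimeError is a sentinel that surfaces
# if the pager ever requests one page too many. Reduced illustration:
from unittest import mock

_call = mock.Mock(side_effect=["page-1", "page-2", RuntimeError])
assert _call() == "page-1"
assert _call() == "page-2"
# A third invocation would raise RuntimeError, the over-fetch sentinel.
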
call.side_effect = ( cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(), cloudbuild.Build(), cloudbuild.Build(),], - next_page_token="abc", + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token='abc', ), - cloudbuild.ListBuildsResponse(builds=[], next_page_token="def",), cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(),], next_page_token="ghi", + builds=[], + next_page_token='def', ), cloudbuild.ListBuildsResponse( - builds=[cloudbuild.Build(), cloudbuild.Build(),], + builds=[ + cloudbuild.Build(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], ), RuntimeError, ) pages = [] async for page in (await client.list_builds(request={})).pages: pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): + for page, token in zip(pages, ['abc','def','ghi', '']): assert page.raw_page.next_page_token == token -def test_cancel_build( - transport: str = "grpc", request_type=cloudbuild.CancelBuildRequest -): +def test_cancel_build(transport: str = 'grpc', request_type=cloudbuild.CancelBuildRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -970,18 +1031,29 @@ def test_cancel_build( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: + with mock.patch.object( + type(client._transport.cancel_build), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build( - id="id_value", - project_id="project_id_value", + id='id_value', + + project_id='project_id_value', + status=cloudbuild.Build.Status.QUEUED, - status_detail="status_detail_value", - images=["images_value"], - logs_bucket="logs_bucket_value", - build_trigger_id="build_trigger_id_value", - log_url="log_url_value", - tags=["tags_value"], + + status_detail='status_detail_value', + + images=['images_value'], + + logs_bucket='logs_bucket_value', + + build_trigger_id='build_trigger_id_value', + + log_url='log_url_value', + + tags=['tags_value'], + ) response = client.cancel_build(request) @@ -995,23 +1067,23 @@ def test_cancel_build( # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.Build) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' assert response.status == cloudbuild.Build.Status.QUEUED - assert response.status_detail == "status_detail_value" + assert response.status_detail == 'status_detail_value' - assert response.images == ["images_value"] + assert response.images == ['images_value'] - assert response.logs_bucket == "logs_bucket_value" + assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == "build_trigger_id_value" + assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == "log_url_value" + assert response.log_url == 'log_url_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] def test_cancel_build_from_dict(): @@ -1019,9 +1091,10 @@ def test_cancel_build_from_dict(): @pytest.mark.asyncio -async def test_cancel_build_async(transport: str = "grpc_asyncio"): +async def test_cancel_build_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1030,22 +1103,20 @@ async def test_cancel_build_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.cancel_build), "__call__" - ) as call: + type(client._client._transport.cancel_build), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.Build( - id="id_value", - project_id="project_id_value", - status=cloudbuild.Build.Status.QUEUED, - status_detail="status_detail_value", - images=["images_value"], - logs_bucket="logs_bucket_value", - build_trigger_id="build_trigger_id_value", - log_url="log_url_value", - tags=["tags_value"], - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( + id='id_value', + project_id='project_id_value', + status=cloudbuild.Build.Status.QUEUED, + status_detail='status_detail_value', + images=['images_value'], + logs_bucket='logs_bucket_value', + build_trigger_id='build_trigger_id_value', + log_url='log_url_value', + tags=['tags_value'], + )) response = await client.cancel_build(request) @@ -1058,37 +1129,42 @@ async def test_cancel_build_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.Build) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' assert response.status == cloudbuild.Build.Status.QUEUED - assert response.status_detail == "status_detail_value" + assert response.status_detail == 'status_detail_value' - assert response.images == ["images_value"] + assert response.images == ['images_value'] - assert response.logs_bucket == "logs_bucket_value" + assert response.logs_bucket == 'logs_bucket_value' - assert response.build_trigger_id == "build_trigger_id_value" + assert response.build_trigger_id == 'build_trigger_id_value' - assert response.log_url == "log_url_value" + assert response.log_url == 'log_url_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] def test_cancel_build_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: + with mock.patch.object( + type(client._transport.cancel_build), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.cancel_build( - project_id="project_id_value", id="id_value", + project_id='project_id_value', + id='id_value', ) # Establish that the underlying call was made with the expected @@ -1096,32 +1172,36 @@ def test_cancel_build_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].id == "id_value" + assert args[0].id == 'id_value' def test_cancel_build_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_build( cloudbuild.CancelBuildRequest(), - project_id="project_id_value", - id="id_value", + project_id='project_id_value', + id='id_value', ) @pytest.mark.asyncio async def test_cancel_build_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.cancel_build), "__call__" - ) as call: + type(client._client._transport.cancel_build), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -1129,7 +1209,8 @@ async def test_cancel_build_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.cancel_build( - project_id="project_id_value", id="id_value", + project_id='project_id_value', + id='id_value', ) # Establish that the underlying call was made with the expected @@ -1137,30 +1218,31 @@ async def test_cancel_build_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].id == "id_value" + assert args[0].id == 'id_value' @pytest.mark.asyncio async def test_cancel_build_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.cancel_build( cloudbuild.CancelBuildRequest(), - project_id="project_id_value", - id="id_value", + project_id='project_id_value', + id='id_value', ) -def test_retry_build( - transport: str = "grpc", request_type=cloudbuild.RetryBuildRequest -): +def test_retry_build(transport: str = 'grpc', request_type=cloudbuild.RetryBuildRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1168,9 +1250,11 @@ def test_retry_build( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.retry_build), "__call__") as call: + with mock.patch.object( + type(client._transport.retry_build), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") + call.return_value = operations_pb2.Operation(name='operations/spam') response = client.retry_build(request) @@ -1189,9 +1273,10 @@ def test_retry_build_from_dict(): @pytest.mark.asyncio -async def test_retry_build_async(transport: str = "grpc_asyncio"): +async def test_retry_build_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1200,11 +1285,11 @@ async def test_retry_build_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.retry_build), "__call__" - ) as call: + type(client._client._transport.retry_build), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + operations_pb2.Operation(name='operations/spam') ) response = await client.retry_build(request) @@ -1220,17 +1305,22 @@ async def test_retry_build_async(transport: str = "grpc_asyncio"): def test_retry_build_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
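
# Like create_build above, retry_build is long-running: the mocked stub
# returns a raw google.longrunning Operation (the GAPIC layer would
# normally wrap it in an operation future), and "operations/spam" is just
# the generated placeholder name. Quick standalone check:
from google.longrunning import operations_pb2

_op = operations_pb2.Operation(name="operations/spam")
assert _op.name == "operations/spam"
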
- with mock.patch.object(type(client._transport.retry_build), "__call__") as call: + with mock.patch.object( + type(client._transport.retry_build), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = operations_pb2.Operation(name='operations/op') # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.retry_build( - project_id="project_id_value", id="id_value", + project_id='project_id_value', + id='id_value', ) # Establish that the underlying call was made with the expected @@ -1238,42 +1328,47 @@ def test_retry_build_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].id == "id_value" + assert args[0].id == 'id_value' def test_retry_build_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.retry_build( cloudbuild.RetryBuildRequest(), - project_id="project_id_value", - id="id_value", + project_id='project_id_value', + id='id_value', ) @pytest.mark.asyncio async def test_retry_build_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.retry_build), "__call__" - ) as call: + type(client._client._transport.retry_build), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = operations_pb2.Operation(name='operations/op') call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + operations_pb2.Operation(name='operations/spam') ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.retry_build( - project_id="project_id_value", id="id_value", + project_id='project_id_value', + id='id_value', ) # Establish that the underlying call was made with the expected @@ -1281,30 +1376,31 @@ async def test_retry_build_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].id == "id_value" + assert args[0].id == 'id_value' @pytest.mark.asyncio async def test_retry_build_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.retry_build( cloudbuild.RetryBuildRequest(), - project_id="project_id_value", - id="id_value", + project_id='project_id_value', + id='id_value', ) -def test_create_build_trigger( - transport: str = "grpc", request_type=cloudbuild.CreateBuildTriggerRequest -): +def test_create_build_trigger(transport: str = 'grpc', request_type=cloudbuild.CreateBuildTriggerRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1313,18 +1409,25 @@ def test_create_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_build_trigger), "__call__" - ) as call: + type(client._transport.create_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( - id="id_value", - description="description_value", - name="name_value", - tags=["tags_value"], + id='id_value', + + description='description_value', + + name='name_value', + + tags=['tags_value'], + disabled=True, - ignored_files=["ignored_files_value"], - included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + + ignored_files=['ignored_files_value'], + + included_files=['included_files_value'], + + build=cloudbuild.Build(id='id_value'), ) response = client.create_build_trigger(request) @@ -1338,19 +1441,19 @@ def test_create_build_trigger( # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.description == "description_value" + assert response.description == 'description_value' - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] assert response.disabled is True - assert response.ignored_files == ["ignored_files_value"] + assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ["included_files_value"] + assert response.included_files == ['included_files_value'] def test_create_build_trigger_from_dict(): @@ -1358,9 +1461,10 @@ def test_create_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_create_build_trigger_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1369,20 +1473,18 @@ async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build_trigger), "__call__" - ) as call: + type(client._client._transport.create_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger( - id="id_value", - description="description_value", - name="name_value", - tags=["tags_value"], - disabled=True, - ignored_files=["ignored_files_value"], - included_files=["included_files_value"], - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + )) response = await client.create_build_trigger(request) @@ -1395,36 +1497,38 @@ async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.description == "description_value" + assert response.description == 'description_value' - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] assert response.disabled is True - assert response.ignored_files == ["ignored_files_value"] + assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ["included_files_value"] + assert response.included_files == ['included_files_value'] def test_create_build_trigger_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_build_trigger), "__call__" - ) as call: + type(client._transport.create_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_build_trigger( - project_id="project_id_value", - trigger=cloudbuild.BuildTrigger(id="id_value"), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(id='id_value'), ) # Establish that the underlying call was made with the expected @@ -1432,43 +1536,45 @@ def test_create_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger == cloudbuild.BuildTrigger(id="id_value") + assert args[0].trigger == cloudbuild.BuildTrigger(id='id_value') def test_create_build_trigger_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.create_build_trigger( cloudbuild.CreateBuildTriggerRequest(), - project_id="project_id_value", - trigger=cloudbuild.BuildTrigger(id="id_value"), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(id='id_value'), ) @pytest.mark.asyncio async def test_create_build_trigger_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build_trigger), "__call__" - ) as call: + type(client._client._transport.create_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_build_trigger( - project_id="project_id_value", - trigger=cloudbuild.BuildTrigger(id="id_value"), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(id='id_value'), ) # Establish that the underlying call was made with the expected @@ -1476,30 +1582,31 @@ async def test_create_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger == cloudbuild.BuildTrigger(id="id_value") + assert args[0].trigger == cloudbuild.BuildTrigger(id='id_value') @pytest.mark.asyncio async def test_create_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_build_trigger( cloudbuild.CreateBuildTriggerRequest(), - project_id="project_id_value", - trigger=cloudbuild.BuildTrigger(id="id_value"), + project_id='project_id_value', + trigger=cloudbuild.BuildTrigger(id='id_value'), ) -def test_get_build_trigger( - transport: str = "grpc", request_type=cloudbuild.GetBuildTriggerRequest -): +def test_get_build_trigger(transport: str = 'grpc', request_type=cloudbuild.GetBuildTriggerRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1508,18 +1615,25 @@ def test_get_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_build_trigger), "__call__" - ) as call: + type(client._transport.get_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = cloudbuild.BuildTrigger( - id="id_value", - description="description_value", - name="name_value", - tags=["tags_value"], + id='id_value', + + description='description_value', + + name='name_value', + + tags=['tags_value'], + disabled=True, - ignored_files=["ignored_files_value"], - included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + + ignored_files=['ignored_files_value'], + + included_files=['included_files_value'], + + build=cloudbuild.Build(id='id_value'), ) response = client.get_build_trigger(request) @@ -1533,19 +1647,19 @@ def test_get_build_trigger( # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.description == "description_value" + assert response.description == 'description_value' - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] assert response.disabled is True - assert response.ignored_files == ["ignored_files_value"] + assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ["included_files_value"] + assert response.included_files == ['included_files_value'] def test_get_build_trigger_from_dict(): @@ -1553,9 +1667,10 @@ def test_get_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_get_build_trigger_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1564,20 +1679,18 @@ async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build_trigger), "__call__" - ) as call: + type(client._client._transport.get_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger( - id="id_value", - description="description_value", - name="name_value", - tags=["tags_value"], - disabled=True, - ignored_files=["ignored_files_value"], - included_files=["included_files_value"], - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + )) response = await client.get_build_trigger(request) @@ -1590,35 +1703,38 @@ async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. 
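
# The `disabled is True` check below works because protobuf bool fields
# materialize as real Python bools, so identity against the True singleton
# holds. Standalone check, assuming the cloudbuild types module this file
# already imports:
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild

_trigger = cloudbuild.BuildTrigger(disabled=True)
assert _trigger.disabled is True
assert isinstance(_trigger.disabled, bool)
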
assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.description == "description_value" + assert response.description == 'description_value' - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] assert response.disabled is True - assert response.ignored_files == ["ignored_files_value"] + assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ["included_files_value"] + assert response.included_files == ['included_files_value'] def test_get_build_trigger_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_build_trigger), "__call__" - ) as call: + type(client._transport.get_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_build_trigger( - project_id="project_id_value", trigger_id="trigger_id_value", + project_id='project_id_value', + trigger_id='trigger_id_value', ) # Establish that the underlying call was made with the expected @@ -1626,42 +1742,45 @@ def test_get_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger_id == "trigger_id_value" + assert args[0].trigger_id == 'trigger_id_value' def test_get_build_trigger_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_build_trigger( cloudbuild.GetBuildTriggerRequest(), - project_id="project_id_value", - trigger_id="trigger_id_value", + project_id='project_id_value', + trigger_id='trigger_id_value', ) @pytest.mark.asyncio async def test_get_build_trigger_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build_trigger), "__call__" - ) as call: + type(client._client._transport.get_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_build_trigger( - project_id="project_id_value", trigger_id="trigger_id_value", + project_id='project_id_value', + trigger_id='trigger_id_value', ) # Establish that the underlying call was made with the expected @@ -1669,30 +1788,31 @@ async def test_get_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger_id == "trigger_id_value" + assert args[0].trigger_id == 'trigger_id_value' @pytest.mark.asyncio async def test_get_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_build_trigger( cloudbuild.GetBuildTriggerRequest(), - project_id="project_id_value", - trigger_id="trigger_id_value", + project_id='project_id_value', + trigger_id='trigger_id_value', ) -def test_list_build_triggers( - transport: str = "grpc", request_type=cloudbuild.ListBuildTriggersRequest -): +def test_list_build_triggers(transport: str = 'grpc', request_type=cloudbuild.ListBuildTriggersRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1701,11 +1821,12 @@ def test_list_build_triggers( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" - ) as call: + type(client._transport.list_build_triggers), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse( - next_page_token="next_page_token_value", + next_page_token='next_page_token_value', + ) response = client.list_build_triggers(request) @@ -1719,7 +1840,7 @@ def test_list_build_triggers( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildTriggersPager) - assert response.next_page_token == "next_page_token_value" + assert response.next_page_token == 'next_page_token_value' def test_list_build_triggers_from_dict(): @@ -1727,9 +1848,10 @@ def test_list_build_triggers_from_dict(): @pytest.mark.asyncio -async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): +async def test_list_build_triggers_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1738,14 +1860,12 @@ async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), "__call__" - ) as call: + type(client._client._transport.list_build_triggers), + '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListBuildTriggersResponse( - next_page_token="next_page_token_value", - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse( + next_page_token='next_page_token_value', + )) response = await client.list_build_triggers(request) @@ -1758,87 +1878,101 @@ async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildTriggersAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert response.next_page_token == 'next_page_token_value' def test_list_build_triggers_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" - ) as call: + type(client._transport.list_build_triggers), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_build_triggers(project_id="project_id_value",) + client.list_build_triggers( + project_id='project_id_value', + ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' def test_list_build_triggers_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), project_id="project_id_value", + cloudbuild.ListBuildTriggersRequest(), + project_id='project_id_value', ) @pytest.mark.asyncio async def test_list_build_triggers_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), "__call__" - ) as call: + type(client._client._transport.list_build_triggers), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListBuildTriggersResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_build_triggers(project_id="project_id_value",) + response = await client.list_build_triggers( + project_id='project_id_value', + ) # Establish that the underlying call was made with the expected # request object values. 
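        # Each entry in mock_calls is a (name, args, kwargs) triple; args[0]
        # is the request proto, so the assertions below prove the flattened
        # keyword arguments were copied onto the outgoing request.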
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' @pytest.mark.asyncio async def test_list_build_triggers_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), project_id="project_id_value", + cloudbuild.ListBuildTriggersRequest(), + project_id='project_id_value', ) def test_list_build_triggers_pager(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" - ) as call: + type(client._transport.list_build_triggers), + '__call__') as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildTriggersResponse( @@ -1847,14 +1981,23 @@ def test_list_build_triggers_pager(): cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(), ], - next_page_token="abc", + next_page_token='abc', ), - cloudbuild.ListBuildTriggersResponse(triggers=[], next_page_token="def",), cloudbuild.ListBuildTriggersResponse( - triggers=[cloudbuild.BuildTrigger(),], next_page_token="ghi", + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', ), cloudbuild.ListBuildTriggersResponse( - triggers=[cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(),], + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], ), RuntimeError, ) @@ -1866,16 +2009,18 @@ def test_list_build_triggers_pager(): results = [i for i in pager] assert len(results) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) for i in results) - + assert all(isinstance(i, cloudbuild.BuildTrigger) + for i in results) def test_list_build_triggers_pages(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" - ) as call: + type(client._transport.list_build_triggers), + '__call__') as call: # Set the response to a series of pages. 
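        # With side_effect set to a sequence, every stub invocation returns
        # the next page in order; the trailing RuntimeError would surface if
        # the pager ever fetched past the final page, whose token is empty.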
call.side_effect = ( cloudbuild.ListBuildTriggersResponse( @@ -1884,32 +2029,40 @@ def test_list_build_triggers_pages(): cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(), ], - next_page_token="abc", + next_page_token='abc', ), - cloudbuild.ListBuildTriggersResponse(triggers=[], next_page_token="def",), cloudbuild.ListBuildTriggersResponse( - triggers=[cloudbuild.BuildTrigger(),], next_page_token="ghi", + triggers=[], + next_page_token='def', ), cloudbuild.ListBuildTriggersResponse( - triggers=[cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(),], + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], ), RuntimeError, ) pages = list(client.list_build_triggers(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): + for page, token in zip(pages, ['abc','def','ghi', '']): assert page.raw_page.next_page_token == token - @pytest.mark.asyncio async def test_list_build_triggers_async_pager(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), - "__call__", - new_callable=mock.AsyncMock, - ) as call: + type(client._client._transport.list_build_triggers), + '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildTriggersResponse( @@ -1918,37 +2071,46 @@ async def test_list_build_triggers_async_pager(): cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(), ], - next_page_token="abc", + next_page_token='abc', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token='def', ), - cloudbuild.ListBuildTriggersResponse(triggers=[], next_page_token="def",), cloudbuild.ListBuildTriggersResponse( - triggers=[cloudbuild.BuildTrigger(),], next_page_token="ghi", + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', ), cloudbuild.ListBuildTriggersResponse( - triggers=[cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(),], + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], ), RuntimeError, ) async_pager = await client.list_build_triggers(request={},) - assert async_pager.next_page_token == "abc" + assert async_pager.next_page_token == 'abc' responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) for i in responses) - + assert all(isinstance(i, cloudbuild.BuildTrigger) + for i in responses) @pytest.mark.asyncio async def test_list_build_triggers_async_pages(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials,) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), - "__call__", - new_callable=mock.AsyncMock, - ) as call: + type(client._client._transport.list_build_triggers), + '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. 
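        # new_callable=mock.AsyncMock makes the patched stub awaitable, so
        # each page in the side_effect sequence below is produced by a
        # successive `await`, letting `async for page` iterate plain
        # responses without FakeUnaryUnaryCall wrappers.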
call.side_effect = ( cloudbuild.ListBuildTriggersResponse( @@ -1957,29 +2119,37 @@ async def test_list_build_triggers_async_pages(): cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(), ], - next_page_token="abc", + next_page_token='abc', ), - cloudbuild.ListBuildTriggersResponse(triggers=[], next_page_token="def",), cloudbuild.ListBuildTriggersResponse( - triggers=[cloudbuild.BuildTrigger(),], next_page_token="ghi", + triggers=[], + next_page_token='def', + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token='ghi', ), cloudbuild.ListBuildTriggersResponse( - triggers=[cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(),], + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], ), RuntimeError, ) pages = [] async for page in (await client.list_build_triggers(request={})).pages: pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): + for page, token in zip(pages, ['abc','def','ghi', '']): assert page.raw_page.next_page_token == token -def test_delete_build_trigger( - transport: str = "grpc", request_type=cloudbuild.DeleteBuildTriggerRequest -): +def test_delete_build_trigger(transport: str = 'grpc', request_type=cloudbuild.DeleteBuildTriggerRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1988,8 +2158,8 @@ def test_delete_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_build_trigger), "__call__" - ) as call: + type(client._transport.delete_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2010,9 +2180,10 @@ def test_delete_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_delete_build_trigger_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2021,8 +2192,8 @@ async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_build_trigger), "__call__" - ) as call: + type(client._client._transport.delete_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2039,19 +2210,22 @@ async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): def test_delete_build_trigger_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_build_trigger), "__call__" - ) as call: + type(client._transport.delete_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. 
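        # DeleteBuildTrigger returns google.protobuf.Empty on the wire,
        # which the generated client surfaces to callers as None, hence the
        # bare None return value below.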
call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_build_trigger( - project_id="project_id_value", trigger_id="trigger_id_value", + project_id='project_id_value', + trigger_id='trigger_id_value', ) # Establish that the underlying call was made with the expected @@ -2059,32 +2233,36 @@ def test_delete_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger_id == "trigger_id_value" + assert args[0].trigger_id == 'trigger_id_value' def test_delete_build_trigger_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_build_trigger( cloudbuild.DeleteBuildTriggerRequest(), - project_id="project_id_value", - trigger_id="trigger_id_value", + project_id='project_id_value', + trigger_id='trigger_id_value', ) @pytest.mark.asyncio async def test_delete_build_trigger_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_build_trigger), "__call__" - ) as call: + type(client._client._transport.delete_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2092,7 +2270,8 @@ async def test_delete_build_trigger_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_build_trigger( - project_id="project_id_value", trigger_id="trigger_id_value", + project_id='project_id_value', + trigger_id='trigger_id_value', ) # Establish that the underlying call was made with the expected @@ -2100,30 +2279,31 @@ async def test_delete_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger_id == "trigger_id_value" + assert args[0].trigger_id == 'trigger_id_value' @pytest.mark.asyncio async def test_delete_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
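    # Illustrative sketch of the guard under test (the assumed
    # generated-client pattern, not quoted from this patch):
    #
    #     if request is not None and any([project_id, trigger_id]):
    #         raise ValueError(...)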
with pytest.raises(ValueError): await client.delete_build_trigger( cloudbuild.DeleteBuildTriggerRequest(), - project_id="project_id_value", - trigger_id="trigger_id_value", + project_id='project_id_value', + trigger_id='trigger_id_value', ) -def test_update_build_trigger( - transport: str = "grpc", request_type=cloudbuild.UpdateBuildTriggerRequest -): +def test_update_build_trigger(transport: str = 'grpc', request_type=cloudbuild.UpdateBuildTriggerRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2132,18 +2312,25 @@ def test_update_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_build_trigger), "__call__" - ) as call: + type(client._transport.update_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( - id="id_value", - description="description_value", - name="name_value", - tags=["tags_value"], + id='id_value', + + description='description_value', + + name='name_value', + + tags=['tags_value'], + disabled=True, - ignored_files=["ignored_files_value"], - included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + + ignored_files=['ignored_files_value'], + + included_files=['included_files_value'], + + build=cloudbuild.Build(id='id_value'), ) response = client.update_build_trigger(request) @@ -2157,19 +2344,19 @@ def test_update_build_trigger( # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.description == "description_value" + assert response.description == 'description_value' - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] assert response.disabled is True - assert response.ignored_files == ["ignored_files_value"] + assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ["included_files_value"] + assert response.included_files == ['included_files_value'] def test_update_build_trigger_from_dict(): @@ -2177,9 +2364,10 @@ def test_update_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_update_build_trigger_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2188,20 +2376,18 @@ async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_build_trigger), "__call__" - ) as call: + type(client._client._transport.update_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger( - id="id_value", - description="description_value", - name="name_value", - tags=["tags_value"], - disabled=True, - ignored_files=["ignored_files_value"], - included_files=["included_files_value"], - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( + id='id_value', + description='description_value', + name='name_value', + tags=['tags_value'], + disabled=True, + ignored_files=['ignored_files_value'], + included_files=['included_files_value'], + )) response = await client.update_build_trigger(request) @@ -2214,37 +2400,39 @@ async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == "id_value" + assert response.id == 'id_value' - assert response.description == "description_value" + assert response.description == 'description_value' - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.tags == ["tags_value"] + assert response.tags == ['tags_value'] assert response.disabled is True - assert response.ignored_files == ["ignored_files_value"] + assert response.ignored_files == ['ignored_files_value'] - assert response.included_files == ["included_files_value"] + assert response.included_files == ['included_files_value'] def test_update_build_trigger_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_build_trigger), "__call__" - ) as call: + type(client._transport.update_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_build_trigger( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=cloudbuild.BuildTrigger(id="id_value"), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(id='id_value'), ) # Establish that the underlying call was made with the expected @@ -2252,47 +2440,49 @@ def test_update_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger_id == "trigger_id_value" + assert args[0].trigger_id == 'trigger_id_value' - assert args[0].trigger == cloudbuild.BuildTrigger(id="id_value") + assert args[0].trigger == cloudbuild.BuildTrigger(id='id_value') def test_update_build_trigger_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_build_trigger( cloudbuild.UpdateBuildTriggerRequest(), - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=cloudbuild.BuildTrigger(id="id_value"), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(id='id_value'), ) @pytest.mark.asyncio async def test_update_build_trigger_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_build_trigger), "__call__" - ) as call: + type(client._client._transport.update_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.BuildTrigger() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_build_trigger( - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=cloudbuild.BuildTrigger(id="id_value"), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(id='id_value'), ) # Establish that the underlying call was made with the expected @@ -2300,33 +2490,34 @@ async def test_update_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger_id == "trigger_id_value" + assert args[0].trigger_id == 'trigger_id_value' - assert args[0].trigger == cloudbuild.BuildTrigger(id="id_value") + assert args[0].trigger == cloudbuild.BuildTrigger(id='id_value') @pytest.mark.asyncio async def test_update_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_build_trigger( cloudbuild.UpdateBuildTriggerRequest(), - project_id="project_id_value", - trigger_id="trigger_id_value", - trigger=cloudbuild.BuildTrigger(id="id_value"), + project_id='project_id_value', + trigger_id='trigger_id_value', + trigger=cloudbuild.BuildTrigger(id='id_value'), ) -def test_run_build_trigger( - transport: str = "grpc", request_type=cloudbuild.RunBuildTriggerRequest -): +def test_run_build_trigger(transport: str = 'grpc', request_type=cloudbuild.RunBuildTriggerRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2335,10 +2526,10 @@ def test_run_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.run_build_trigger), "__call__" - ) as call: + type(client._transport.run_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. 
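        # RunBuildTrigger is a long-running operation, so the stub is faked
        # with a raw operations_pb2.Operation. Outside the mock, the client
        # wraps it so a caller can block on completion, e.g. (illustrative):
        #
        #     operation = client.run_build_trigger(project_id=..., trigger_id=...)
        #     build = operation.result()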
- call.return_value = operations_pb2.Operation(name="operations/spam") + call.return_value = operations_pb2.Operation(name='operations/spam') response = client.run_build_trigger(request) @@ -2357,9 +2548,10 @@ def test_run_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_run_build_trigger_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2368,11 +2560,11 @@ async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.run_build_trigger), "__call__" - ) as call: + type(client._client._transport.run_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + operations_pb2.Operation(name='operations/spam') ) response = await client.run_build_trigger(request) @@ -2388,21 +2580,23 @@ async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): def test_run_build_trigger_flattened(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.run_build_trigger), "__call__" - ) as call: + type(client._transport.run_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = operations_pb2.Operation(name='operations/op') # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.run_build_trigger( - project_id="project_id_value", - trigger_id="trigger_id_value", - source=cloudbuild.RepoSource(project_id="project_id_value"), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), ) # Establish that the underlying call was made with the expected @@ -2410,47 +2604,51 @@ def test_run_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger_id == "trigger_id_value" + assert args[0].trigger_id == 'trigger_id_value' - assert args[0].source == cloudbuild.RepoSource(project_id="project_id_value") + assert args[0].source == cloudbuild.RepoSource(project_id='project_id_value') def test_run_build_trigger_flattened_error(): - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.run_build_trigger( cloudbuild.RunBuildTriggerRequest(), - project_id="project_id_value", - trigger_id="trigger_id_value", - source=cloudbuild.RepoSource(project_id="project_id_value"), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), ) @pytest.mark.asyncio async def test_run_build_trigger_flattened_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.run_build_trigger), "__call__" - ) as call: + type(client._client._transport.run_build_trigger), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = operations_pb2.Operation(name='operations/op') call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + operations_pb2.Operation(name='operations/spam') ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.run_build_trigger( - project_id="project_id_value", - trigger_id="trigger_id_value", - source=cloudbuild.RepoSource(project_id="project_id_value"), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), ) # Establish that the underlying call was made with the expected @@ -2458,33 +2656,34 @@ async def test_run_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + assert args[0].project_id == 'project_id_value' - assert args[0].trigger_id == "trigger_id_value" + assert args[0].trigger_id == 'trigger_id_value' - assert args[0].source == cloudbuild.RepoSource(project_id="project_id_value") + assert args[0].source == cloudbuild.RepoSource(project_id='project_id_value') @pytest.mark.asyncio async def test_run_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = CloudBuildAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.run_build_trigger( cloudbuild.RunBuildTriggerRequest(), - project_id="project_id_value", - trigger_id="trigger_id_value", - source=cloudbuild.RepoSource(project_id="project_id_value"), + project_id='project_id_value', + trigger_id='trigger_id_value', + source=cloudbuild.RepoSource(project_id='project_id_value'), ) -def test_create_worker_pool( - transport: str = "grpc", request_type=cloudbuild.CreateWorkerPoolRequest -): +def test_create_worker_pool(transport: str = 'grpc', request_type=cloudbuild.CreateWorkerPoolRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2493,16 +2692,22 @@ def test_create_worker_pool( # Mock the actual call within the gRPC stub, and fake the request. 
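    # Patching __call__ on the stub's multicallable type intercepts the RPC
    # at the channel boundary, so none of these tests perform network I/O.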
with mock.patch.object( - type(client._transport.create_worker_pool), "__call__" - ) as call: + type(client._transport.create_worker_pool), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( - name="name_value", - project_id="project_id_value", - service_account_email="service_account_email_value", + name='name_value', + + project_id='project_id_value', + + service_account_email='service_account_email_value', + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + ) response = client.create_worker_pool(request) @@ -2516,11 +2721,11 @@ def test_create_worker_pool( # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' - assert response.service_account_email == "service_account_email_value" + assert response.service_account_email == 'service_account_email_value' assert response.worker_count == 1314 @@ -2534,9 +2739,10 @@ def test_create_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_create_worker_pool_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2545,19 +2751,17 @@ async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_worker_pool), "__call__" - ) as call: + type(client._client._transport.create_worker_pool), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.WorkerPool( - name="name_value", - project_id="project_id_value", - service_account_email="service_account_email_value", - worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( + name='name_value', + project_id='project_id_value', + service_account_email='service_account_email_value', + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + )) response = await client.create_worker_pool(request) @@ -2570,11 +2774,11 @@ async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' - assert response.service_account_email == "service_account_email_value" + assert response.service_account_email == 'service_account_email_value' assert response.worker_count == 1314 @@ -2583,11 +2787,10 @@ async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_get_worker_pool( - transport: str = "grpc", request_type=cloudbuild.GetWorkerPoolRequest -): +def test_get_worker_pool(transport: str = 'grpc', request_type=cloudbuild.GetWorkerPoolRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2595,15 +2798,23 @@ def test_get_worker_pool( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_worker_pool), "__call__") as call: + with mock.patch.object( + type(client._transport.get_worker_pool), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( - name="name_value", - project_id="project_id_value", - service_account_email="service_account_email_value", + name='name_value', + + project_id='project_id_value', + + service_account_email='service_account_email_value', + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + ) response = client.get_worker_pool(request) @@ -2617,11 +2828,11 @@ def test_get_worker_pool( # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' - assert response.service_account_email == "service_account_email_value" + assert response.service_account_email == 'service_account_email_value' assert response.worker_count == 1314 @@ -2635,9 +2846,10 @@ def test_get_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_get_worker_pool_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2646,19 +2858,17 @@ async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_worker_pool), "__call__" - ) as call: + type(client._client._transport.get_worker_pool), + '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.WorkerPool( - name="name_value", - project_id="project_id_value", - service_account_email="service_account_email_value", - worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( + name='name_value', + project_id='project_id_value', + service_account_email='service_account_email_value', + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + )) response = await client.get_worker_pool(request) @@ -2671,11 +2881,11 @@ async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' - assert response.service_account_email == "service_account_email_value" + assert response.service_account_email == 'service_account_email_value' assert response.worker_count == 1314 @@ -2684,11 +2894,10 @@ async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_delete_worker_pool( - transport: str = "grpc", request_type=cloudbuild.DeleteWorkerPoolRequest -): +def test_delete_worker_pool(transport: str = 'grpc', request_type=cloudbuild.DeleteWorkerPoolRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2697,8 +2906,8 @@ def test_delete_worker_pool( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_worker_pool), "__call__" - ) as call: + type(client._transport.delete_worker_pool), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2719,9 +2928,10 @@ def test_delete_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_delete_worker_pool_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2730,8 +2940,8 @@ async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_worker_pool), "__call__" - ) as call: + type(client._client._transport.delete_worker_pool), + '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2747,11 +2957,10 @@ async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): assert response is None -def test_update_worker_pool( - transport: str = "grpc", request_type=cloudbuild.UpdateWorkerPoolRequest -): +def test_update_worker_pool(transport: str = 'grpc', request_type=cloudbuild.UpdateWorkerPoolRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2760,16 +2969,22 @@ def test_update_worker_pool( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_worker_pool), "__call__" - ) as call: + type(client._transport.update_worker_pool), + '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( - name="name_value", - project_id="project_id_value", - service_account_email="service_account_email_value", + name='name_value', + + project_id='project_id_value', + + service_account_email='service_account_email_value', + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + ) response = client.update_worker_pool(request) @@ -2783,11 +2998,11 @@ def test_update_worker_pool( # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' - assert response.service_account_email == "service_account_email_value" + assert response.service_account_email == 'service_account_email_value' assert response.worker_count == 1314 @@ -2801,9 +3016,10 @@ def test_update_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_update_worker_pool_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2812,19 +3028,17 @@ async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_worker_pool), "__call__" - ) as call: + type(client._client._transport.update_worker_pool), + '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.WorkerPool( - name="name_value", - project_id="project_id_value", - service_account_email="service_account_email_value", - worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - ) - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( + name='name_value', + project_id='project_id_value', + service_account_email='service_account_email_value', + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + )) response = await client.update_worker_pool(request) @@ -2837,11 +3051,11 @@ async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == "name_value" + assert response.name == 'name_value' - assert response.project_id == "project_id_value" + assert response.project_id == 'project_id_value' - assert response.service_account_email == "service_account_email_value" + assert response.service_account_email == 'service_account_email_value' assert response.worker_count == 1314 @@ -2850,11 +3064,10 @@ async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_list_worker_pools( - transport: str = "grpc", request_type=cloudbuild.ListWorkerPoolsRequest -): +def test_list_worker_pools(transport: str = 'grpc', request_type=cloudbuild.ListWorkerPoolsRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2863,10 +3076,11 @@ def test_list_worker_pools( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_worker_pools), "__call__" - ) as call: + type(client._transport.list_worker_pools), + '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListWorkerPoolsResponse() + call.return_value = cloudbuild.ListWorkerPoolsResponse( + ) response = client.list_worker_pools(request) @@ -2885,9 +3099,10 @@ def test_list_worker_pools_from_dict(): @pytest.mark.asyncio -async def test_list_worker_pools_async(transport: str = "grpc_asyncio"): +async def test_list_worker_pools_async(transport: str = 'grpc_asyncio'): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2896,12 +3111,11 @@ async def test_list_worker_pools_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_worker_pools), "__call__" - ) as call: + type(client._client._transport.list_worker_pools), + '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - cloudbuild.ListWorkerPoolsResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse( + )) response = await client.list_worker_pools(request) @@ -2922,7 +3136,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2941,7 +3156,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = CloudBuildClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2971,8 +3187,13 @@ def test_transport_get_channel(): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.CloudBuildGrpcTransport,) + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client._transport, + transports.CloudBuildGrpcTransport, + ) def test_cloud_build_base_transport_error(): @@ -2980,15 +3201,13 @@ def test_cloud_build_base_transport_error(): with pytest.raises(exceptions.DuplicateCredentialArgs): transport = transports.CloudBuildTransport( credentials=credentials.AnonymousCredentials(), - credentials_file="credentials.json", + credentials_file="credentials.json" ) def test_cloud_build_base_transport(): # Instantiate the base transport. - with mock.patch( - "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__" - ) as Transport: + with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__') as Transport: Transport.return_value = None transport = transports.CloudBuildTransport( credentials=credentials.AnonymousCredentials(), @@ -2997,23 +3216,23 @@ def test_cloud_build_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
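    # The base transport is an abstract interface: concrete transports
    # (gRPC, gRPC-asyncio) override each RPC, while the loop below checks
    # that every method name raises NotImplementedError on the base class.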
methods = ( - "create_build", - "get_build", - "list_builds", - "cancel_build", - "retry_build", - "create_build_trigger", - "get_build_trigger", - "list_build_triggers", - "delete_build_trigger", - "update_build_trigger", - "run_build_trigger", - "create_worker_pool", - "get_worker_pool", - "delete_worker_pool", - "update_worker_pool", - "list_worker_pools", - ) + 'create_build', + 'get_build', + 'list_builds', + 'cancel_build', + 'retry_build', + 'create_build_trigger', + 'get_build_trigger', + 'list_build_triggers', + 'delete_build_trigger', + 'update_build_trigger', + 'run_build_trigger', + 'create_worker_pool', + 'get_worker_pool', + 'delete_worker_pool', + 'update_worker_pool', + 'list_worker_pools', + ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -3026,30 +3245,27 @@ def test_cloud_build_base_transport(): def test_cloud_build_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object( - auth, "load_credentials_from_file" - ) as load_creds, mock.patch( - "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" - ) as Transport: + with mock.patch.object(auth, 'load_credentials_from_file') as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.CloudBuildTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=("https://www.googleapis.com/auth/cloud-platform",), + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), quota_project_id="octopus", ) def test_cloud_build_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(auth, 'default') as adc: adc.return_value = (credentials.AnonymousCredentials(), None) CloudBuildClient() - adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",), + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform',), quota_project_id=None, ) @@ -3057,39 +3273,32 @@ def test_cloud_build_auth_adc(): def test_cloud_build_transport_auth_adc(): # If credentials and host are not provided, the transport class should use # ADC credentials. 
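    # For reference, an illustrative sketch of the ADC resolution that
    # `auth.default` performs when it is not patched as below:
    #
    #     import google.auth
    #     credentials, project = google.auth.default(
    #         scopes=("https://www.googleapis.com/auth/cloud-platform",),
    #     )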
- with mock.patch.object(auth, "default") as adc: + with mock.patch.object(auth, 'default') as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" - ) - adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",), + transports.CloudBuildGrpcTransport(host="squid.clam.whelk", quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform',), quota_project_id="octopus", ) - def test_cloud_build_host_no_port(): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="cloudbuild.googleapis.com" - ), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), ) - assert client._transport._host == "cloudbuild.googleapis.com:443" + assert client._transport._host == 'cloudbuild.googleapis.com:443' def test_cloud_build_host_with_port(): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="cloudbuild.googleapis.com:8000" - ), + client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), ) - assert client._transport._host == "cloudbuild.googleapis.com:8000" + assert client._transport._host == 'cloudbuild.googleapis.com:8000' def test_cloud_build_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.insecure_channel('http://localhost/') # Check that if channel is provided, mtls endpoint and client_cert_source # won't be used. @@ -3106,7 +3315,7 @@ def test_cloud_build_grpc_transport_channel(): def test_cloud_build_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.insecure_channel('http://localhost/') # Check that if channel is provided, mtls endpoint and client_cert_source # won't be used. 
@@ -3150,7 +3359,9 @@ def test_cloud_build_grpc_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), ssl_credentials=mock_ssl_cred, quota_project_id=None, ) @@ -3185,7 +3396,9 @@ def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_client_cert_source "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), ssl_credentials=mock_ssl_cred, quota_project_id=None, ) @@ -3222,7 +3435,9 @@ def test_cloud_build_grpc_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), ssl_credentials=mock_ssl_cred, quota_project_id=None, ) @@ -3259,7 +3474,9 @@ def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), ssl_credentials=mock_ssl_cred, quota_project_id=None, ) @@ -3268,12 +3485,16 @@ def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_adc( def test_cloud_build_grpc_lro_client(): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=credentials.AnonymousCredentials(), + transport='grpc', ) transport = client._transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -3281,12 +3502,16 @@ def test_cloud_build_grpc_lro_client(): def test_cloud_build_grpc_lro_async_client(): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=credentials.AnonymousCredentials(), + transport='grpc_asyncio', ) transport = client._client._transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client From aa6ead90869921892476cb44e1afacbaa2dee2ed Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 31 Jul 2020 08:56:00 -0700 Subject: [PATCH 3/6] Revert "feat(python-library): changes to docs job (#700)" This reverts commit f07cb4446192952f19be3056957f56d180586055. 
Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Jul 29 17:33:57 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: ee7506d15daa3873accfff9430eff7e3953f0248 Source-Link: https://github.com/googleapis/synthtool/commit/ee7506d15daa3873accfff9430eff7e3953f0248 --- docs/conf.py | 71 +- google/cloud/devtools/cloudbuild/__init__.py | 118 +- .../cloud/devtools/cloudbuild_v1/__init__.py | 88 +- .../services/cloud_build/__init__.py | 4 +- .../services/cloud_build/async_client.py | 534 ++--- .../services/cloud_build/client.py | 533 ++--- .../services/cloud_build/pagers.py | 68 +- .../cloud_build/transports/__init__.py | 10 +- .../services/cloud_build/transports/base.py | 293 ++- .../services/cloud_build/transports/grpc.py | 295 +-- .../cloud_build/transports/grpc_asyncio.py | 311 +-- .../devtools/cloudbuild_v1/types/__init__.py | 132 +- .../cloudbuild_v1/types/cloudbuild.py | 368 ++- synth.metadata | 2 +- .../gapic/cloudbuild_v1/test_cloud_build.py | 1977 ++++++++--------- 15 files changed, 2224 insertions(+), 2580 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 97a8f600..2d0ebebb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,22 +1,6 @@ # -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# -# google-devtools-cloudbuild documentation build configuration file +# google-cloud-build documentation build configuration file # # This file is execfile()d with the current directory set to its # containing dir. @@ -36,7 +20,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +__version__ = "" # -- General configuration ------------------------------------------------ @@ -54,23 +38,21 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. @@ -80,9 +62,9 @@ master_doc = "index" # General information about the project. -project = u"google-devtools-cloudbuild" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +project = u"google-cloud-build" +copyright = u"2019, Google" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -148,9 +130,9 @@ # further. 
For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Devtools Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-build", "github_user": "googleapis", - "github_repo": "google-cloud-python", + "github_repo": "python-cloudbuild", "github_banner": True, "font_family": "'Roboto', Georgia, sans", "head_font_family": "'Roboto', Georgia, serif", @@ -179,7 +161,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = [] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -242,7 +224,7 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = "google-devtools-cloudbuild-doc" +htmlhelp_basename = "google-cloud-build-doc" # -- Options for warnings ------------------------------------------------------ @@ -260,13 +242,13 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', + #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', + #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. - # 'preamble': '', + #'preamble': '', # Latex figure (float) alignment - # 'figure_align': 'htbp', + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples @@ -275,8 +257,8 @@ latex_documents = [ ( master_doc, - "google-devtools-cloudbuild.tex", - u"google-devtools-cloudbuild Documentation", + "google-cloud-build.tex", + u"google-cloud-build Documentation", author, "manual", ) @@ -310,8 +292,8 @@ man_pages = [ ( master_doc, - "google-devtools-cloudbuild", - u"Google Devtools Cloudbuild Documentation", + "google-cloud-build", + u"google-cloud-build Documentation", [author], 1, ) @@ -329,11 +311,11 @@ texinfo_documents = [ ( master_doc, - "google-devtools-cloudbuild", - u"google-devtools-cloudbuild Documentation", + "google-cloud-build", + u"google-cloud-build Documentation", author, - "google-devtools-cloudbuild", - "GAPIC library for Google Devtools Cloudbuild API", + "google-cloud-build", + "google-cloud-build Library", "APIs", ) ] @@ -354,12 +336,9 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), } diff --git a/google/cloud/devtools/cloudbuild/__init__.py b/google/cloud/devtools/cloudbuild/__init__.py index 75bb1b23..a8ad9f88 100644 --- a/google/cloud/devtools/cloudbuild/__init__.py +++ b/google/cloud/devtools/cloudbuild/__init__.py @@ -15,8 +15,12 @@ # limitations under the License. 
# -from google.cloud.devtools.cloudbuild_v1.services.cloud_build.async_client import CloudBuildAsyncClient -from google.cloud.devtools.cloudbuild_v1.services.cloud_build.client import CloudBuildClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build.async_client import ( + CloudBuildAsyncClient, +) +from google.cloud.devtools.cloudbuild_v1.services.cloud_build.client import ( + CloudBuildClient, +) from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ArtifactResult from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Artifacts from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Build @@ -27,9 +31,13 @@ from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import BuiltImage from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CancelBuildRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( + CreateBuildTriggerRequest, +) from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import CreateWorkerPoolRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( + DeleteBuildTriggerRequest, +) from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import DeleteWorkerPoolRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import FileHashes from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetBuildRequest @@ -37,8 +45,12 @@ from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GetWorkerPoolRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import GitHubEventsConfig from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Hash -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersRequest -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildTriggersResponse +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( + ListBuildTriggersRequest, +) +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( + ListBuildTriggersResponse, +) from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListBuildsResponse from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ListWorkerPoolsRequest @@ -55,56 +67,58 @@ from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import SourceProvenance from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import StorageSource from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import TimeSpan -from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateBuildTriggerRequest +from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import ( + UpdateBuildTriggerRequest, +) from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import UpdateWorkerPoolRequest from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import Volume from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import WorkerConfig from google.cloud.devtools.cloudbuild_v1.types.cloudbuild import WorkerPool __all__ = ( - 'ArtifactResult', - 'Artifacts', - 'Build', - 'BuildOperationMetadata', - 'BuildOptions', - 'BuildStep', - 'BuildTrigger', - 'BuiltImage', - 'CancelBuildRequest', - 'CloudBuildAsyncClient', - 'CloudBuildClient', - 'CreateBuildRequest', - 
'CreateBuildTriggerRequest', - 'CreateWorkerPoolRequest', - 'DeleteBuildTriggerRequest', - 'DeleteWorkerPoolRequest', - 'FileHashes', - 'GetBuildRequest', - 'GetBuildTriggerRequest', - 'GetWorkerPoolRequest', - 'GitHubEventsConfig', - 'Hash', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'ListBuildsRequest', - 'ListBuildsResponse', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', - 'Network', - 'PullRequestFilter', - 'PushFilter', - 'RepoSource', - 'Results', - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'Secret', - 'Source', - 'SourceProvenance', - 'StorageSource', - 'TimeSpan', - 'UpdateBuildTriggerRequest', - 'UpdateWorkerPoolRequest', - 'Volume', - 'WorkerConfig', - 'WorkerPool', + "ArtifactResult", + "Artifacts", + "Build", + "BuildOperationMetadata", + "BuildOptions", + "BuildStep", + "BuildTrigger", + "BuiltImage", + "CancelBuildRequest", + "CloudBuildAsyncClient", + "CloudBuildClient", + "CreateBuildRequest", + "CreateBuildTriggerRequest", + "CreateWorkerPoolRequest", + "DeleteBuildTriggerRequest", + "DeleteWorkerPoolRequest", + "FileHashes", + "GetBuildRequest", + "GetBuildTriggerRequest", + "GetWorkerPoolRequest", + "GitHubEventsConfig", + "Hash", + "ListBuildTriggersRequest", + "ListBuildTriggersResponse", + "ListBuildsRequest", + "ListBuildsResponse", + "ListWorkerPoolsRequest", + "ListWorkerPoolsResponse", + "Network", + "PullRequestFilter", + "PushFilter", + "RepoSource", + "Results", + "RetryBuildRequest", + "RunBuildTriggerRequest", + "Secret", + "Source", + "SourceProvenance", + "StorageSource", + "TimeSpan", + "UpdateBuildTriggerRequest", + "UpdateWorkerPoolRequest", + "Volume", + "WorkerConfig", + "WorkerPool", ) diff --git a/google/cloud/devtools/cloudbuild_v1/__init__.py b/google/cloud/devtools/cloudbuild_v1/__init__.py index d30f054f..59a2ac89 100644 --- a/google/cloud/devtools/cloudbuild_v1/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/__init__.py @@ -62,48 +62,48 @@ __all__ = ( - 'ArtifactResult', - 'Artifacts', - 'Build', - 'BuildOperationMetadata', - 'BuildOptions', - 'BuildStep', - 'BuildTrigger', - 'BuiltImage', - 'CancelBuildRequest', - 'CreateBuildRequest', - 'CreateBuildTriggerRequest', - 'CreateWorkerPoolRequest', - 'DeleteBuildTriggerRequest', - 'DeleteWorkerPoolRequest', - 'FileHashes', - 'GetBuildRequest', - 'GetBuildTriggerRequest', - 'GetWorkerPoolRequest', - 'GitHubEventsConfig', - 'Hash', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'ListBuildsRequest', - 'ListBuildsResponse', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', - 'Network', - 'PullRequestFilter', - 'PushFilter', - 'RepoSource', - 'Results', - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'Secret', - 'Source', - 'SourceProvenance', - 'StorageSource', - 'TimeSpan', - 'UpdateBuildTriggerRequest', - 'UpdateWorkerPoolRequest', - 'Volume', - 'WorkerConfig', - 'WorkerPool', -'CloudBuildClient', + "ArtifactResult", + "Artifacts", + "Build", + "BuildOperationMetadata", + "BuildOptions", + "BuildStep", + "BuildTrigger", + "BuiltImage", + "CancelBuildRequest", + "CreateBuildRequest", + "CreateBuildTriggerRequest", + "CreateWorkerPoolRequest", + "DeleteBuildTriggerRequest", + "DeleteWorkerPoolRequest", + "FileHashes", + "GetBuildRequest", + "GetBuildTriggerRequest", + "GetWorkerPoolRequest", + "GitHubEventsConfig", + "Hash", + "ListBuildTriggersRequest", + "ListBuildTriggersResponse", + "ListBuildsRequest", + "ListBuildsResponse", + "ListWorkerPoolsRequest", + "ListWorkerPoolsResponse", + "Network", + "PullRequestFilter", + 
"PushFilter", + "RepoSource", + "Results", + "RetryBuildRequest", + "RunBuildTriggerRequest", + "Secret", + "Source", + "SourceProvenance", + "StorageSource", + "TimeSpan", + "UpdateBuildTriggerRequest", + "UpdateWorkerPoolRequest", + "Volume", + "WorkerConfig", + "WorkerPool", + "CloudBuildClient", ) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py index b57f50ba..51798087 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/__init__.py @@ -19,6 +19,6 @@ from .async_client import CloudBuildAsyncClient __all__ = ( - 'CloudBuildClient', - 'CloudBuildAsyncClient', + "CloudBuildClient", + "CloudBuildAsyncClient", ) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index 3e7c9d35..96421e96 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -21,12 +21,12 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore -from google.oauth2 import service_account # type: ignore +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.api_core import operation from google.api_core import operation_async @@ -59,13 +59,17 @@ class CloudBuildAsyncClient: from_service_account_file = CloudBuildClient.from_service_account_file from_service_account_json = from_service_account_file - get_transport_class = functools.partial(type(CloudBuildClient).get_transport_class, type(CloudBuildClient)) + get_transport_class = functools.partial( + type(CloudBuildClient).get_transport_class, type(CloudBuildClient) + ) - def __init__(self, *, - credentials: credentials.Credentials = None, - transport: Union[str, CloudBuildTransport] = 'grpc_asyncio', - client_options: ClientOptions = None, - ) -> None: + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudBuildTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + ) -> None: """Instantiate the cloud build client. 
Args: @@ -97,20 +101,19 @@ def __init__(self, *, """ self._client = CloudBuildClient( - credentials=credentials, - transport=transport, - client_options=client_options, + credentials=credentials, transport=transport, client_options=client_options, ) - async def create_build(self, - request: cloudbuild.CreateBuildRequest = None, - *, - project_id: str = None, - build: cloudbuild.Build = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: + async def create_build( + self, + request: cloudbuild.CreateBuildRequest = None, + *, + project_id: str = None, + build: cloudbuild.Build = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: r"""Starts a build with the specified configuration. This method returns a long-running ``Operation``, which includes @@ -171,8 +174,10 @@ async def create_build(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, build]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.CreateBuildRequest(request) @@ -193,12 +198,7 @@ async def create_build(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -211,15 +211,16 @@ async def create_build(self, # Done; return the response. return response - async def get_build(self, - request: cloudbuild.GetBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: + async def get_build( + self, + request: cloudbuild.GetBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: r"""Returns information about a previously requested build. The ``Build`` that is returned includes its status (such as @@ -276,8 +277,10 @@ async def get_build(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, id]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.GetBuildRequest(request) @@ -298,8 +301,7 @@ async def get_build(self, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -307,25 +309,21 @@ async def get_build(self, ) # Send the request. 
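# Editorial note, not part of the generated patch: the Retry configured just
# above is an exponential backoff. Nominal (jitter-free) sleeps grow by a
# factor of 1.3 starting from 0.1s, are capped at 60s each, and the overall
# call is bounded by the 600s default timeout. A quick sketch of that schedule:
delay, elapsed, schedule = 0.1, 0.0, []
while elapsed < 600.0:
    schedule.append(round(delay, 3))
    elapsed += delay
    delay = min(delay * 1.3, 60.0)
# schedule[:5] == [0.1, 0.13, 0.169, 0.22, 0.286]; later entries flatten at 60.0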
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - async def list_builds(self, - request: cloudbuild.ListBuildsRequest = None, - *, - project_id: str = None, - filter: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildsAsyncPager: + async def list_builds( + self, + request: cloudbuild.ListBuildsRequest = None, + *, + project_id: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildsAsyncPager: r"""Lists previously requested builds. Previously requested builds may still be in-progress, or may have finished successfully or unsuccessfully. @@ -363,8 +361,10 @@ async def list_builds(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, filter]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.ListBuildsRequest(request) @@ -385,8 +385,7 @@ async def list_builds(self, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -394,34 +393,27 @@ async def list_builds(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListBuildsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. return response - async def cancel_build(self, - request: cloudbuild.CancelBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: + async def cancel_build( + self, + request: cloudbuild.CancelBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: r"""Cancels a build in progress. Args: @@ -474,8 +466,10 @@ async def cancel_build(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, id]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.CancelBuildRequest(request) @@ -496,25 +490,21 @@ async def cancel_build(self, ) # Send the request. 
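# A hedged sketch (editorial, not part of the generated patch): the guard above
# makes a full request object and flattened fields mutually exclusive. The
# project and build ids below are illustrative only.
from google.cloud.devtools.cloudbuild import CancelBuildRequest, CloudBuildAsyncClient

async def cancel_examples(client: CloudBuildAsyncClient) -> None:
    # Either flattened keyword arguments...
    await client.cancel_build(project_id="my-project", id="some-build-id")
    # ...or a prebuilt request object:
    await client.cancel_build(
        request=CancelBuildRequest(project_id="my-project", id="some-build-id")
    )
    # Mixing the two raises ValueError before any RPC is attempted.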
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - async def retry_build(self, - request: cloudbuild.RetryBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: + async def retry_build( + self, + request: cloudbuild.RetryBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a new build based on the specified build. This method creates a new build using the original build @@ -601,8 +591,10 @@ async def retry_build(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, id]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.RetryBuildRequest(request) @@ -623,12 +615,7 @@ async def retry_build(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -641,15 +628,16 @@ async def retry_build(self, # Done; return the response. return response - async def create_build_trigger(self, - request: cloudbuild.CreateBuildTriggerRequest = None, - *, - project_id: str = None, - trigger: cloudbuild.BuildTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + async def create_build_trigger( + self, + request: cloudbuild.CreateBuildTriggerRequest = None, + *, + project_id: str = None, + trigger: cloudbuild.BuildTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Creates a new ``BuildTrigger``. This API is experimental. @@ -687,8 +675,10 @@ async def create_build_trigger(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.CreateBuildTriggerRequest(request) @@ -709,25 +699,21 @@ async def create_build_trigger(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. 
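# A hedged sketch (editorial, not part of the generated patch): a minimal
# trigger body for the create_build_trigger call being sent here. The repo,
# branch, and filename values are illustrative.
from google.cloud.devtools.cloudbuild import BuildTrigger, RepoSource

trigger = BuildTrigger(
    description="run cloudbuild.yaml on every push to master",
    trigger_template=RepoSource(repo_name="my-repo", branch_name="master"),
    filename="cloudbuild.yaml",
)
# e.g. await client.create_build_trigger(project_id="my-project", trigger=trigger)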
return response - async def get_build_trigger(self, - request: cloudbuild.GetBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + async def get_build_trigger( + self, + request: cloudbuild.GetBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Returns information about a ``BuildTrigger``. This API is experimental. @@ -766,8 +752,10 @@ async def get_build_trigger(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger_id]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.GetBuildTriggerRequest(request) @@ -788,8 +776,7 @@ async def get_build_trigger(self, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -797,24 +784,20 @@ async def get_build_trigger(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - async def list_build_triggers(self, - request: cloudbuild.ListBuildTriggersRequest = None, - *, - project_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildTriggersAsyncPager: + async def list_build_triggers( + self, + request: cloudbuild.ListBuildTriggersRequest = None, + *, + project_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildTriggersAsyncPager: r"""Lists existing ``BuildTrigger``\ s. This API is experimental. @@ -848,8 +831,10 @@ async def list_build_triggers(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.ListBuildTriggersRequest(request) @@ -868,8 +853,7 @@ async def list_build_triggers(self, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -877,34 +861,27 @@ async def list_build_triggers(self, ) # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListBuildTriggersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. return response - async def delete_build_trigger(self, - request: cloudbuild.DeleteBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: + async def delete_build_trigger( + self, + request: cloudbuild.DeleteBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. This API is experimental. @@ -934,8 +911,10 @@ async def delete_build_trigger(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger_id]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.DeleteBuildTriggerRequest(request) @@ -956,8 +935,7 @@ async def delete_build_trigger(self, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -966,22 +944,20 @@ async def delete_build_trigger(self, # Send the request. await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) - async def update_build_trigger(self, - request: cloudbuild.UpdateBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - trigger: cloudbuild.BuildTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + async def update_build_trigger( + self, + request: cloudbuild.UpdateBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + trigger: cloudbuild.BuildTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. This API is experimental. @@ -1024,8 +1000,10 @@ async def update_build_trigger(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger_id, trigger]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) request = cloudbuild.UpdateBuildTriggerRequest(request) @@ -1048,26 +1026,22 @@ async def update_build_trigger(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - async def run_build_trigger(self, - request: cloudbuild.RunBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - source: cloudbuild.RepoSource = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: + async def run_build_trigger( + self, + request: cloudbuild.RunBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + source: cloudbuild.RepoSource = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: r"""Runs a ``BuildTrigger`` at a particular source revision. Args: @@ -1131,8 +1105,10 @@ async def run_build_trigger(self, # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([project_id, trigger_id, source]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) request = cloudbuild.RunBuildTriggerRequest(request) @@ -1155,12 +1131,7 @@ async def run_build_trigger(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -1173,13 +1144,14 @@ async def run_build_trigger(self, # Done; return the response. return response - async def create_worker_pool(self, - request: cloudbuild.CreateWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + async def create_worker_pool( + self, + request: cloudbuild.CreateWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Creates a ``WorkerPool`` to run the builds, and returns the new worker pool. @@ -1225,23 +1197,19 @@ async def create_worker_pool(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - async def get_worker_pool(self, - request: cloudbuild.GetWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + async def get_worker_pool( + self, + request: cloudbuild.GetWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Returns information about a ``WorkerPool``. This API is experimental. 
@@ -1286,8 +1254,7 @@ async def get_worker_pool(self, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -1295,23 +1262,19 @@ async def get_worker_pool(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - async def delete_worker_pool(self, - request: cloudbuild.DeleteWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: + async def delete_worker_pool( + self, + request: cloudbuild.DeleteWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Deletes a ``WorkerPool`` by its project ID and WorkerPool name. This API is experimental. @@ -1340,19 +1303,17 @@ async def delete_worker_pool(self, # Send the request. await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) - async def update_worker_pool(self, - request: cloudbuild.UpdateWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + async def update_worker_pool( + self, + request: cloudbuild.UpdateWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Update a ``WorkerPool``. This API is experimental. @@ -1396,23 +1357,19 @@ async def update_worker_pool(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - async def list_worker_pools(self, - request: cloudbuild.ListWorkerPoolsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.ListWorkerPoolsResponse: + async def list_worker_pools( + self, + request: cloudbuild.ListWorkerPoolsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ListWorkerPoolsResponse: r"""List project's ``WorkerPools``. This API is experimental. @@ -1444,8 +1401,7 @@ async def list_worker_pools(self, maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -1453,32 +1409,20 @@ async def list_worker_pools(self, ) # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. 
return response - - - - - try: _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( - 'google-devtools-cloudbuild', + "google-devtools-cloudbuild", ).version, ) except pkg_resources.DistributionNotFound: _client_info = gapic_v1.client_info.ClientInfo() -__all__ = ( - 'CloudBuildAsyncClient', -) +__all__ = ("CloudBuildAsyncClient",) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index dc819666..e11623af 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -21,14 +21,14 @@ from typing import Callable, Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore -from google.auth.transport import mtls # type: ignore +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore from google.api_core import operation from google.api_core import operation_async @@ -49,13 +49,12 @@ class CloudBuildClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] - _transport_registry['grpc'] = CloudBuildGrpcTransport - _transport_registry['grpc_asyncio'] = CloudBuildGrpcAsyncIOTransport + _transport_registry["grpc"] = CloudBuildGrpcTransport + _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport - def get_transport_class(cls, - label: str = None, - ) -> Type[CloudBuildTransport]: + def get_transport_class(cls, label: str = None,) -> Type[CloudBuildTransport]: """Return an appropriate transport class. Args: @@ -114,7 +113,7 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - DEFAULT_ENDPOINT = 'cloudbuild.googleapis.com' + DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) @@ -133,18 +132,19 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: {@api.name}: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file - def __init__(self, *, - credentials: credentials.Credentials = None, - transport: Union[str, CloudBuildTransport] = None, - client_options: ClientOptions = None, - ) -> None: + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudBuildTransport] = None, + client_options: ClientOptions = None, + ) -> None: """Instantiate the cloud build client. Args: @@ -191,7 +191,9 @@ def __init__(self, *, or mtls.has_default_client_cert_source() ) client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if has_client_cert_source else self.DEFAULT_ENDPOINT + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( @@ -204,8 +206,10 @@ def __init__(self, *, if isinstance(transport, CloudBuildTransport): # transport is a CloudBuildTransport instance. if credentials or client_options.credentials_file: - raise ValueError('When providing a transport instance, ' - 'provide its credentials directly.') + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if client_options.scopes: raise ValueError( "When providing a transport instance, " @@ -224,15 +228,16 @@ def __init__(self, *, quota_project_id=client_options.quota_project_id, ) - def create_build(self, - request: cloudbuild.CreateBuildRequest = None, - *, - project_id: str = None, - build: cloudbuild.Build = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_build( + self, + request: cloudbuild.CreateBuildRequest = None, + *, + project_id: str = None, + build: cloudbuild.Build = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Starts a build with the specified configuration. This method returns a long-running ``Operation``, which includes @@ -294,8 +299,10 @@ def create_build(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, build]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.CreateBuildRequest. @@ -317,12 +324,7 @@ def create_build(self, rpc = self._transport._wrapped_methods[self._transport.create_build] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation.from_gapic( @@ -335,15 +337,16 @@ def create_build(self, # Done; return the response. 
return response - def get_build(self, - request: cloudbuild.GetBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: + def get_build( + self, + request: cloudbuild.GetBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: r"""Returns information about a previously requested build. The ``Build`` that is returned includes its status (such as @@ -401,8 +404,10 @@ def get_build(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.GetBuildRequest. @@ -424,25 +429,21 @@ def get_build(self, rpc = self._transport._wrapped_methods[self._transport.get_build] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - def list_builds(self, - request: cloudbuild.ListBuildsRequest = None, - *, - project_id: str = None, - filter: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildsPager: + def list_builds( + self, + request: cloudbuild.ListBuildsRequest = None, + *, + project_id: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildsPager: r"""Lists previously requested builds. Previously requested builds may still be in-progress, or may have finished successfully or unsuccessfully. @@ -481,8 +482,10 @@ def list_builds(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, filter]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.ListBuildsRequest. @@ -504,34 +507,27 @@ def list_builds(self, rpc = self._transport._wrapped_methods[self._transport.list_builds] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListBuildsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. 
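# A hedged sketch (editorial, not part of the generated patch): the
# ListBuildsPager built above iterates across pages transparently (the async
# client offers the same via __aiter__). The filter expression and project id
# are illustrative.
from google.cloud.devtools.cloudbuild import CloudBuildClient

def print_successful_builds(client: CloudBuildClient) -> None:
    for build in client.list_builds(project_id="my-project", filter='status="SUCCESS"'):
        print(build.id, build.status)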
return response - def cancel_build(self, - request: cloudbuild.CancelBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.Build: + def cancel_build( + self, + request: cloudbuild.CancelBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: r"""Cancels a build in progress. Args: @@ -585,8 +581,10 @@ def cancel_build(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.CancelBuildRequest. @@ -608,25 +606,21 @@ def cancel_build(self, rpc = self._transport._wrapped_methods[self._transport.cancel_build] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - def retry_build(self, - request: cloudbuild.RetryBuildRequest = None, - *, - project_id: str = None, - id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def retry_build( + self, + request: cloudbuild.RetryBuildRequest = None, + *, + project_id: str = None, + id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new build based on the specified build. This method creates a new build using the original build @@ -714,8 +708,10 @@ def retry_build(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.RetryBuildRequest. @@ -737,12 +733,7 @@ def retry_build(self, rpc = self._transport._wrapped_methods[self._transport.retry_build] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. response = operation.from_gapic( @@ -755,15 +746,16 @@ def retry_build(self, # Done; return the response. 
return response - def create_build_trigger(self, - request: cloudbuild.CreateBuildTriggerRequest = None, - *, - project_id: str = None, - trigger: cloudbuild.BuildTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + def create_build_trigger( + self, + request: cloudbuild.CreateBuildTriggerRequest = None, + *, + project_id: str = None, + trigger: cloudbuild.BuildTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Creates a new ``BuildTrigger``. This API is experimental. @@ -802,8 +794,10 @@ def create_build_trigger(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.CreateBuildTriggerRequest. @@ -825,25 +819,21 @@ def create_build_trigger(self, rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - def get_build_trigger(self, - request: cloudbuild.GetBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + def get_build_trigger( + self, + request: cloudbuild.GetBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Returns information about a ``BuildTrigger``. This API is experimental. @@ -883,8 +873,10 @@ def get_build_trigger(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.GetBuildTriggerRequest. @@ -906,24 +898,20 @@ def get_build_trigger(self, rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. 
return response - def list_build_triggers(self, - request: cloudbuild.ListBuildTriggersRequest = None, - *, - project_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBuildTriggersPager: + def list_build_triggers( + self, + request: cloudbuild.ListBuildTriggersRequest = None, + *, + project_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBuildTriggersPager: r"""Lists existing ``BuildTrigger``\ s. This API is experimental. @@ -958,8 +946,10 @@ def list_build_triggers(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.ListBuildTriggersRequest. @@ -979,34 +969,27 @@ def list_build_triggers(self, rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListBuildTriggersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. return response - def delete_build_trigger(self, - request: cloudbuild.DeleteBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: + def delete_build_trigger( + self, + request: cloudbuild.DeleteBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. This API is experimental. @@ -1037,8 +1020,10 @@ def delete_build_trigger(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.DeleteBuildTriggerRequest. @@ -1061,22 +1046,20 @@ def delete_build_trigger(self, # Send the request. 
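# The pager returned by list_build_triggers is directly iterable (see the
# ListBuildTriggersPager hunk later in this patch); a sketch, assuming
# default credentials and a placeholder project:
from google.cloud.devtools import cloudbuild_v1

client = cloudbuild_v1.CloudBuildClient()
for trigger in client.list_build_triggers(project_id="my-project"):
    # Items are BuildTrigger messages; further pages are fetched lazily.
    print(trigger.id)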
rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) - def update_build_trigger(self, - request: cloudbuild.UpdateBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - trigger: cloudbuild.BuildTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.BuildTrigger: + def update_build_trigger( + self, + request: cloudbuild.UpdateBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + trigger: cloudbuild.BuildTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. This API is experimental. @@ -1120,8 +1103,10 @@ def update_build_trigger(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger_id, trigger]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.UpdateBuildTriggerRequest. @@ -1145,26 +1130,22 @@ def update_build_trigger(self, rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - def run_build_trigger(self, - request: cloudbuild.RunBuildTriggerRequest = None, - *, - project_id: str = None, - trigger_id: str = None, - source: cloudbuild.RepoSource = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def run_build_trigger( + self, + request: cloudbuild.RunBuildTriggerRequest = None, + *, + project_id: str = None, + trigger_id: str = None, + source: cloudbuild.RepoSource = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Runs a ``BuildTrigger`` at a particular source revision. Args: @@ -1229,8 +1210,10 @@ def run_build_trigger(self, # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, trigger_id, source]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # Minor optimization to avoid making a copy if the user passes # in a cloudbuild.RunBuildTriggerRequest. @@ -1254,12 +1237,7 @@ def run_build_trigger(self, rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Wrap the response in an operation future. 
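# Usage sketch for the update_build_trigger / run_build_trigger surface
# reshaped above. The RepoSource field values are illustrative;
# run_build_trigger returns a long-running operation whose result is the
# finished Build.
from google.cloud.devtools import cloudbuild_v1
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild

client = cloudbuild_v1.CloudBuildClient()
source = cloudbuild.RepoSource(repo_name="my-repo", branch_name="master")
build = client.run_build_trigger(
    project_id="my-project", trigger_id="trigger-id", source=source,
).result()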
response = operation.from_gapic( @@ -1272,13 +1250,14 @@ def run_build_trigger(self, # Done; return the response. return response - def create_worker_pool(self, - request: cloudbuild.CreateWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + def create_worker_pool( + self, + request: cloudbuild.CreateWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Creates a ``WorkerPool`` to run the builds, and returns the new worker pool. @@ -1325,23 +1304,19 @@ def create_worker_pool(self, rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - def get_worker_pool(self, - request: cloudbuild.GetWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + def get_worker_pool( + self, + request: cloudbuild.GetWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Returns information about a ``WorkerPool``. This API is experimental. @@ -1387,23 +1362,19 @@ def get_worker_pool(self, rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - def delete_worker_pool(self, - request: cloudbuild.DeleteWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: + def delete_worker_pool( + self, + request: cloudbuild.DeleteWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Deletes a ``WorkerPool`` by its project ID and WorkerPool name. This API is experimental. @@ -1433,19 +1404,17 @@ def delete_worker_pool(self, # Send the request. rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, + request, retry=retry, timeout=timeout, metadata=metadata, ) - def update_worker_pool(self, - request: cloudbuild.UpdateWorkerPoolRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.WorkerPool: + def update_worker_pool( + self, + request: cloudbuild.UpdateWorkerPoolRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.WorkerPool: r"""Update a ``WorkerPool``. This API is experimental. @@ -1490,23 +1459,19 @@ def update_worker_pool(self, rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] # Send the request. 
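# Unlike the build and trigger methods, the WorkerPool methods in these
# hunks expose no flattened fields, so a request object is the only input.
# A sketch, assuming the experimental API is enabled for the project; the
# response field names follow the v1 proto rather than this patch:
from google.cloud.devtools import cloudbuild_v1
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild

client = cloudbuild_v1.CloudBuildClient()
response = client.list_worker_pools(request=cloudbuild.ListWorkerPoolsRequest())
for pool in response.worker_pools:  # repeated WorkerPool per the v1 proto
    print(pool.name)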
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - def list_worker_pools(self, - request: cloudbuild.ListWorkerPoolsRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> cloudbuild.ListWorkerPoolsResponse: + def list_worker_pools( + self, + request: cloudbuild.ListWorkerPoolsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ListWorkerPoolsResponse: r"""List project's ``WorkerPools``. This API is experimental. @@ -1539,32 +1504,20 @@ def list_worker_pools(self, rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response - - - - - try: _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( - 'google-devtools-cloudbuild', + "google-devtools-cloudbuild", ).version, ) except pkg_resources.DistributionNotFound: _client_info = gapic_v1.client_info.ClientInfo() -__all__ = ( - 'CloudBuildClient', -) +__all__ = ("CloudBuildClient",) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py index 53593f53..52132bd1 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/pagers.py @@ -37,12 +37,15 @@ class ListBuildsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., cloudbuild.ListBuildsResponse], - request: cloudbuild.ListBuildsRequest, - response: cloudbuild.ListBuildsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., cloudbuild.ListBuildsResponse], + request: cloudbuild.ListBuildsRequest, + response: cloudbuild.ListBuildsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -76,7 +79,7 @@ def __iter__(self) -> Iterable[cloudbuild.Build]: yield from page.builds def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListBuildsAsyncPager: @@ -96,12 +99,15 @@ class ListBuildsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[cloudbuild.ListBuildsResponse]], - request: cloudbuild.ListBuildsRequest, - response: cloudbuild.ListBuildsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[cloudbuild.ListBuildsResponse]], + request: cloudbuild.ListBuildsRequest, + response: cloudbuild.ListBuildsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. 
Args: @@ -139,7 +145,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListBuildTriggersPager: @@ -159,12 +165,15 @@ class ListBuildTriggersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., cloudbuild.ListBuildTriggersResponse], - request: cloudbuild.ListBuildTriggersRequest, - response: cloudbuild.ListBuildTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., cloudbuild.ListBuildTriggersResponse], + request: cloudbuild.ListBuildTriggersRequest, + response: cloudbuild.ListBuildTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -198,7 +207,7 @@ def __iter__(self) -> Iterable[cloudbuild.BuildTrigger]: yield from page.triggers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListBuildTriggersAsyncPager: @@ -218,12 +227,15 @@ class ListBuildTriggersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[cloudbuild.ListBuildTriggersResponse]], - request: cloudbuild.ListBuildTriggersRequest, - response: cloudbuild.ListBuildTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[cloudbuild.ListBuildTriggersResponse]], + request: cloudbuild.ListBuildTriggersRequest, + response: cloudbuild.ListBuildTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -261,4 +273,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py index 7239ab73..cf9e2143 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py @@ -25,12 +25,12 @@ # Compile a registry of transports. 
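# Beyond plain iteration, the pagers above also expose whole pages; a
# sketch of page-level access. The `pages` property is elided from these
# hunks but belongs to the same generated pager surface, so treat this as
# an assumption about that surface:
from google.cloud.devtools import cloudbuild_v1

client = cloudbuild_v1.CloudBuildClient()
pager = client.list_builds(project_id="my-project")
print(pager)  # e.g. ListBuildsPager<...> via the __repr__ shown above
for page in pager.pages:       # one ListBuildsResponse per RPC
    for build in page.builds:  # the same field __iter__ yields from
        print(build.id)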
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] -_transport_registry['grpc'] = CloudBuildGrpcTransport -_transport_registry['grpc_asyncio'] = CloudBuildGrpcAsyncIOTransport +_transport_registry["grpc"] = CloudBuildGrpcTransport +_transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport __all__ = ( - 'CloudBuildTransport', - 'CloudBuildGrpcTransport', - 'CloudBuildGrpcAsyncIOTransport', + "CloudBuildTransport", + "CloudBuildGrpcTransport", + "CloudBuildGrpcAsyncIOTransport", ) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py index 3b1b21ee..32d9f153 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py @@ -21,7 +21,7 @@ from google import auth from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials # type: ignore @@ -34,28 +34,28 @@ try: _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( - 'google-devtools-cloudbuild', + "google-devtools-cloudbuild", ).version, ) except pkg_resources.DistributionNotFound: _client_info = gapic_v1.client_info.ClientInfo() + class CloudBuildTransport(abc.ABC): """Abstract transport class for CloudBuild.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) def __init__( - self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = "cloudbuild.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -73,24 +73,26 @@ def __init__( and quota. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id - ) + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) elif credentials is None: - credentials, _ = auth.default(scopes=scopes, quota_project_id=quota_project_id) + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) # Save the credentials. self._credentials = credentials @@ -102,9 +104,7 @@ def _prep_wrapped_messages(self): # Precompute the wrapped methods. 
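# A sketch of the base-transport contract reformatted above: a host given
# without a port is stored with ":443" appended, and credentials /
# credentials_file are mutually exclusive. AnonymousCredentials stands in
# for real credentials; no RPC is made here.
from google.api_core import exceptions
from google.auth import credentials as ga_credentials
from google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports import (
    CloudBuildGrpcTransport,
)

transport = CloudBuildGrpcTransport(
    host="cloudbuild.googleapis.com",  # saved as "cloudbuild.googleapis.com:443"
    credentials=ga_credentials.AnonymousCredentials(),
)
try:
    CloudBuildGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        credentials_file="creds.json",
    )
except exceptions.DuplicateCredentialArgs:
    pass  # the two credential sources cannot be combined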
self._wrapped_methods = { self.create_build: gapic_v1.method.wrap_method( - self.create_build, - default_timeout=600.0, - client_info=_client_info, + self.create_build, default_timeout=600.0, client_info=_client_info, ), self.get_build: gapic_v1.method.wrap_method( self.get_build, @@ -113,8 +113,7 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -127,22 +126,17 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, client_info=_client_info, ), self.cancel_build: gapic_v1.method.wrap_method( - self.cancel_build, - default_timeout=600.0, - client_info=_client_info, + self.cancel_build, default_timeout=600.0, client_info=_client_info, ), self.retry_build: gapic_v1.method.wrap_method( - self.retry_build, - default_timeout=600.0, - client_info=_client_info, + self.retry_build, default_timeout=600.0, client_info=_client_info, ), self.create_build_trigger: gapic_v1.method.wrap_method( self.create_build_trigger, @@ -156,8 +150,7 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -170,8 +163,7 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -184,8 +176,7 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -197,9 +188,7 @@ def _prep_wrapped_messages(self): client_info=_client_info, ), self.run_build_trigger: gapic_v1.method.wrap_method( - self.run_build_trigger, - default_timeout=600.0, - client_info=_client_info, + self.run_build_trigger, default_timeout=600.0, client_info=_client_info, ), self.create_worker_pool: gapic_v1.method.wrap_method( self.create_worker_pool, @@ -213,8 +202,7 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, @@ -237,14 +225,12 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, -exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=600.0, client_info=_client_info, ), - } @property @@ -253,150 +239,163 @@ def operations_client(self) -> operations_v1.OperationsClient: raise NotImplementedError() @property - def create_build(self) -> typing.Callable[ - [cloudbuild.CreateBuildRequest], - typing.Union[ - operations.Operation, - typing.Awaitable[operations.Operation] - ]]: + def create_build( + self, + ) -> typing.Callable[ + [cloudbuild.CreateBuildRequest], + typing.Union[operations.Operation, 
typing.Awaitable[operations.Operation]], + ]: raise NotImplementedError() @property - def get_build(self) -> typing.Callable[ - [cloudbuild.GetBuildRequest], - typing.Union[ - cloudbuild.Build, - typing.Awaitable[cloudbuild.Build] - ]]: + def get_build( + self, + ) -> typing.Callable[ + [cloudbuild.GetBuildRequest], + typing.Union[cloudbuild.Build, typing.Awaitable[cloudbuild.Build]], + ]: raise NotImplementedError() @property - def list_builds(self) -> typing.Callable[ - [cloudbuild.ListBuildsRequest], - typing.Union[ - cloudbuild.ListBuildsResponse, - typing.Awaitable[cloudbuild.ListBuildsResponse] - ]]: + def list_builds( + self, + ) -> typing.Callable[ + [cloudbuild.ListBuildsRequest], + typing.Union[ + cloudbuild.ListBuildsResponse, + typing.Awaitable[cloudbuild.ListBuildsResponse], + ], + ]: raise NotImplementedError() @property - def cancel_build(self) -> typing.Callable[ - [cloudbuild.CancelBuildRequest], - typing.Union[ - cloudbuild.Build, - typing.Awaitable[cloudbuild.Build] - ]]: + def cancel_build( + self, + ) -> typing.Callable[ + [cloudbuild.CancelBuildRequest], + typing.Union[cloudbuild.Build, typing.Awaitable[cloudbuild.Build]], + ]: raise NotImplementedError() @property - def retry_build(self) -> typing.Callable[ - [cloudbuild.RetryBuildRequest], - typing.Union[ - operations.Operation, - typing.Awaitable[operations.Operation] - ]]: + def retry_build( + self, + ) -> typing.Callable[ + [cloudbuild.RetryBuildRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: raise NotImplementedError() @property - def create_build_trigger(self) -> typing.Callable[ - [cloudbuild.CreateBuildTriggerRequest], - typing.Union[ - cloudbuild.BuildTrigger, - typing.Awaitable[cloudbuild.BuildTrigger] - ]]: + def create_build_trigger( + self, + ) -> typing.Callable[ + [cloudbuild.CreateBuildTriggerRequest], + typing.Union[ + cloudbuild.BuildTrigger, typing.Awaitable[cloudbuild.BuildTrigger] + ], + ]: raise NotImplementedError() @property - def get_build_trigger(self) -> typing.Callable[ - [cloudbuild.GetBuildTriggerRequest], - typing.Union[ - cloudbuild.BuildTrigger, - typing.Awaitable[cloudbuild.BuildTrigger] - ]]: + def get_build_trigger( + self, + ) -> typing.Callable[ + [cloudbuild.GetBuildTriggerRequest], + typing.Union[ + cloudbuild.BuildTrigger, typing.Awaitable[cloudbuild.BuildTrigger] + ], + ]: raise NotImplementedError() @property - def list_build_triggers(self) -> typing.Callable[ - [cloudbuild.ListBuildTriggersRequest], - typing.Union[ - cloudbuild.ListBuildTriggersResponse, - typing.Awaitable[cloudbuild.ListBuildTriggersResponse] - ]]: + def list_build_triggers( + self, + ) -> typing.Callable[ + [cloudbuild.ListBuildTriggersRequest], + typing.Union[ + cloudbuild.ListBuildTriggersResponse, + typing.Awaitable[cloudbuild.ListBuildTriggersResponse], + ], + ]: raise NotImplementedError() @property - def delete_build_trigger(self) -> typing.Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - typing.Union[ - empty.Empty, - typing.Awaitable[empty.Empty] - ]]: + def delete_build_trigger( + self, + ) -> typing.Callable[ + [cloudbuild.DeleteBuildTriggerRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: raise NotImplementedError() @property - def update_build_trigger(self) -> typing.Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - typing.Union[ - cloudbuild.BuildTrigger, - typing.Awaitable[cloudbuild.BuildTrigger] - ]]: + def update_build_trigger( + self, + ) -> typing.Callable[ + [cloudbuild.UpdateBuildTriggerRequest], + 
typing.Union[ + cloudbuild.BuildTrigger, typing.Awaitable[cloudbuild.BuildTrigger] + ], + ]: raise NotImplementedError() @property - def run_build_trigger(self) -> typing.Callable[ - [cloudbuild.RunBuildTriggerRequest], - typing.Union[ - operations.Operation, - typing.Awaitable[operations.Operation] - ]]: + def run_build_trigger( + self, + ) -> typing.Callable[ + [cloudbuild.RunBuildTriggerRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: raise NotImplementedError() @property - def create_worker_pool(self) -> typing.Callable[ - [cloudbuild.CreateWorkerPoolRequest], - typing.Union[ - cloudbuild.WorkerPool, - typing.Awaitable[cloudbuild.WorkerPool] - ]]: + def create_worker_pool( + self, + ) -> typing.Callable[ + [cloudbuild.CreateWorkerPoolRequest], + typing.Union[cloudbuild.WorkerPool, typing.Awaitable[cloudbuild.WorkerPool]], + ]: raise NotImplementedError() @property - def get_worker_pool(self) -> typing.Callable[ - [cloudbuild.GetWorkerPoolRequest], - typing.Union[ - cloudbuild.WorkerPool, - typing.Awaitable[cloudbuild.WorkerPool] - ]]: + def get_worker_pool( + self, + ) -> typing.Callable[ + [cloudbuild.GetWorkerPoolRequest], + typing.Union[cloudbuild.WorkerPool, typing.Awaitable[cloudbuild.WorkerPool]], + ]: raise NotImplementedError() @property - def delete_worker_pool(self) -> typing.Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - typing.Union[ - empty.Empty, - typing.Awaitable[empty.Empty] - ]]: + def delete_worker_pool( + self, + ) -> typing.Callable[ + [cloudbuild.DeleteWorkerPoolRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: raise NotImplementedError() @property - def update_worker_pool(self) -> typing.Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - typing.Union[ - cloudbuild.WorkerPool, - typing.Awaitable[cloudbuild.WorkerPool] - ]]: + def update_worker_pool( + self, + ) -> typing.Callable[ + [cloudbuild.UpdateWorkerPoolRequest], + typing.Union[cloudbuild.WorkerPool, typing.Awaitable[cloudbuild.WorkerPool]], + ]: raise NotImplementedError() @property - def list_worker_pools(self) -> typing.Callable[ - [cloudbuild.ListWorkerPoolsRequest], - typing.Union[ - cloudbuild.ListWorkerPoolsResponse, - typing.Awaitable[cloudbuild.ListWorkerPoolsResponse] - ]]: + def list_worker_pools( + self, + ) -> typing.Callable[ + [cloudbuild.ListWorkerPoolsRequest], + typing.Union[ + cloudbuild.ListWorkerPoolsResponse, + typing.Awaitable[cloudbuild.ListWorkerPoolsResponse], + ], + ]: raise NotImplementedError() -__all__ = ( - 'CloudBuildTransport', -) +__all__ = ("CloudBuildTransport",) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py index adb9944b..c8affe84 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py @@ -17,10 +17,10 @@ from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import grpc_helpers # type: ignore +from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -52,17 +52,21 @@ class CloudBuildGrpcTransport(CloudBuildTransport): It sends 
protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None) -> None: + def __init__( + self, + *, + host: str = "cloudbuild.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None + ) -> None: """Instantiate the transport. Args: @@ -105,10 +109,16 @@ def __init__(self, *, # If a channel was explicitly provided, set it. self._grpc_channel = channel elif api_mtls_endpoint: - host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) # Create SSL credentials with client_cert_source or application # default SSL credentials. @@ -142,13 +152,15 @@ def __init__(self, *, ) @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "cloudbuild.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: address (Optional[str]): The host for the channel to use. @@ -193,10 +205,9 @@ def grpc_channel(self) -> grpc.Channel: """ # Sanity check: Only create a new channel if we do not already # have one. - if not hasattr(self, '_grpc_channel'): + if not hasattr(self, "_grpc_channel"): self._grpc_channel = self.create_channel( - self._host, - credentials=self._credentials, + self._host, credentials=self._credentials, ) # Return the channel from cache. @@ -210,18 +221,18 @@ def operations_client(self) -> operations_v1.OperationsClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if 'operations_client' not in self.__dict__: - self.__dict__['operations_client'] = operations_v1.OperationsClient( + if "operations_client" not in self.__dict__: + self.__dict__["operations_client"] = operations_v1.OperationsClient( self.grpc_channel ) # Return the client from cache. - return self.__dict__['operations_client'] + return self.__dict__["operations_client"] @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - operations.Operation]: + def create_build( + self, + ) -> Callable[[cloudbuild.CreateBuildRequest], operations.Operation]: r"""Return a callable for the create build method over gRPC. Starts a build with the specified configuration.
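# Every stub property in the hunks below repeats one memoization shape; the
# helper here distills it with hypothetical names (it is not part of the
# patch or of the generated surface):
import grpc

def cached_stub(stubs: dict, channel: grpc.Channel, name: str, path: str,
                request_serializer, response_deserializer):
    # Build the unary-unary callable once per method name, then reuse it
    # on every later access, exactly as the properties below do.
    if name not in stubs:
        stubs[name] = channel.unary_unary(
            path,
            request_serializer=request_serializer,
            response_deserializer=response_deserializer,
        )
    return stubs[name]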
@@ -240,18 +251,16 @@ def create_build(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_build' not in self._stubs: - self._stubs['create_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', + if "create_build" not in self._stubs: + self._stubs["create_build"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild", request_serializer=cloudbuild.CreateBuildRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs['create_build'] + return self._stubs["create_build"] @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - cloudbuild.Build]: + def get_build(self) -> Callable[[cloudbuild.GetBuildRequest], cloudbuild.Build]: r"""Return a callable for the get build method over gRPC. Returns information about a previously requested build. @@ -270,18 +279,18 @@ def get_build(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_build' not in self._stubs: - self._stubs['get_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', + if "get_build" not in self._stubs: + self._stubs["get_build"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetBuild", request_serializer=cloudbuild.GetBuildRequest.serialize, response_deserializer=cloudbuild.Build.deserialize, ) - return self._stubs['get_build'] + return self._stubs["get_build"] @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - cloudbuild.ListBuildsResponse]: + def list_builds( + self, + ) -> Callable[[cloudbuild.ListBuildsRequest], cloudbuild.ListBuildsResponse]: r"""Return a callable for the list builds method over gRPC. Lists previously requested builds. @@ -298,18 +307,18 @@ def list_builds(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_builds' not in self._stubs: - self._stubs['list_builds'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', + if "list_builds" not in self._stubs: + self._stubs["list_builds"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds", request_serializer=cloudbuild.ListBuildsRequest.serialize, response_deserializer=cloudbuild.ListBuildsResponse.deserialize, ) - return self._stubs['list_builds'] + return self._stubs["list_builds"] @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - cloudbuild.Build]: + def cancel_build( + self, + ) -> Callable[[cloudbuild.CancelBuildRequest], cloudbuild.Build]: r"""Return a callable for the cancel build method over gRPC. Cancels a build in progress. @@ -324,18 +333,18 @@ def cancel_build(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'cancel_build' not in self._stubs: - self._stubs['cancel_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', + if "cancel_build" not in self._stubs: + self._stubs["cancel_build"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild", request_serializer=cloudbuild.CancelBuildRequest.serialize, response_deserializer=cloudbuild.Build.deserialize, ) - return self._stubs['cancel_build'] + return self._stubs["cancel_build"] @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - operations.Operation]: + def retry_build( + self, + ) -> Callable[[cloudbuild.RetryBuildRequest], operations.Operation]: r"""Return a callable for the retry build method over gRPC. Creates a new build based on the specified build. @@ -379,18 +388,18 @@ def retry_build(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'retry_build' not in self._stubs: - self._stubs['retry_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', + if "retry_build" not in self._stubs: + self._stubs["retry_build"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild", request_serializer=cloudbuild.RetryBuildRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs['retry_build'] + return self._stubs["retry_build"] @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - cloudbuild.BuildTrigger]: + def create_build_trigger( + self, + ) -> Callable[[cloudbuild.CreateBuildTriggerRequest], cloudbuild.BuildTrigger]: r"""Return a callable for the create build trigger method over gRPC. Creates a new ``BuildTrigger``. @@ -407,18 +416,18 @@ def create_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_build_trigger' not in self._stubs: - self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', + if "create_build_trigger" not in self._stubs: + self._stubs["create_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger", request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs['create_build_trigger'] + return self._stubs["create_build_trigger"] @property - def get_build_trigger(self) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], - cloudbuild.BuildTrigger]: + def get_build_trigger( + self, + ) -> Callable[[cloudbuild.GetBuildTriggerRequest], cloudbuild.BuildTrigger]: r"""Return a callable for the get build trigger method over gRPC. Returns information about a ``BuildTrigger``. @@ -435,18 +444,20 @@ def get_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_build_trigger' not in self._stubs: - self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', + if "get_build_trigger" not in self._stubs: + self._stubs["get_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger", request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs['get_build_trigger'] + return self._stubs["get_build_trigger"] @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - cloudbuild.ListBuildTriggersResponse]: + def list_build_triggers( + self, + ) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], cloudbuild.ListBuildTriggersResponse + ]: r"""Return a callable for the list build triggers method over gRPC. Lists existing ``BuildTrigger``\ s. @@ -463,18 +474,18 @@ def list_build_triggers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_build_triggers' not in self._stubs: - self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', + if "list_build_triggers" not in self._stubs: + self._stubs["list_build_triggers"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers", request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, ) - return self._stubs['list_build_triggers'] + return self._stubs["list_build_triggers"] @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - empty.Empty]: + def delete_build_trigger( + self, + ) -> Callable[[cloudbuild.DeleteBuildTriggerRequest], empty.Empty]: r"""Return a callable for the delete build trigger method over gRPC. Deletes a ``BuildTrigger`` by its project ID and trigger ID. @@ -491,18 +502,18 @@ def delete_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_build_trigger' not in self._stubs: - self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', + if "delete_build_trigger" not in self._stubs: + self._stubs["delete_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger", request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, response_deserializer=empty.Empty.FromString, ) - return self._stubs['delete_build_trigger'] + return self._stubs["delete_build_trigger"] @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - cloudbuild.BuildTrigger]: + def update_build_trigger( + self, + ) -> Callable[[cloudbuild.UpdateBuildTriggerRequest], cloudbuild.BuildTrigger]: r"""Return a callable for the update build trigger method over gRPC. Updates a ``BuildTrigger`` by its project ID and trigger ID. @@ -519,18 +530,18 @@ def update_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_build_trigger' not in self._stubs: - self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', + if "update_build_trigger" not in self._stubs: + self._stubs["update_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger", request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs['update_build_trigger'] + return self._stubs["update_build_trigger"] @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - operations.Operation]: + def run_build_trigger( + self, + ) -> Callable[[cloudbuild.RunBuildTriggerRequest], operations.Operation]: r"""Return a callable for the run build trigger method over gRPC. Runs a ``BuildTrigger`` at a particular source revision. @@ -545,18 +556,18 @@ def run_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'run_build_trigger' not in self._stubs: - self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', + if "run_build_trigger" not in self._stubs: + self._stubs["run_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger", request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs['run_build_trigger'] + return self._stubs["run_build_trigger"] @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - cloudbuild.WorkerPool]: + def create_worker_pool( + self, + ) -> Callable[[cloudbuild.CreateWorkerPoolRequest], cloudbuild.WorkerPool]: r"""Return a callable for the create worker pool method over gRPC. Creates a ``WorkerPool`` to run the builds, and returns the new @@ -574,18 +585,18 @@ def create_worker_pool(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_worker_pool' not in self._stubs: - self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', + if "create_worker_pool" not in self._stubs: + self._stubs["create_worker_pool"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool", request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs['create_worker_pool'] + return self._stubs["create_worker_pool"] @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - cloudbuild.WorkerPool]: + def get_worker_pool( + self, + ) -> Callable[[cloudbuild.GetWorkerPoolRequest], cloudbuild.WorkerPool]: r"""Return a callable for the get worker pool method over gRPC. Returns information about a ``WorkerPool``. @@ -602,18 +613,18 @@ def get_worker_pool(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_worker_pool' not in self._stubs: - self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', + if "get_worker_pool" not in self._stubs: + self._stubs["get_worker_pool"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool", request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs['get_worker_pool'] + return self._stubs["get_worker_pool"] @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - empty.Empty]: + def delete_worker_pool( + self, + ) -> Callable[[cloudbuild.DeleteWorkerPoolRequest], empty.Empty]: r"""Return a callable for the delete worker pool method over gRPC. Deletes a ``WorkerPool`` by its project ID and WorkerPool name. @@ -630,18 +641,18 @@ def delete_worker_pool(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_worker_pool' not in self._stubs: - self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', + if "delete_worker_pool" not in self._stubs: + self._stubs["delete_worker_pool"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool", request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, response_deserializer=empty.Empty.FromString, ) - return self._stubs['delete_worker_pool'] + return self._stubs["delete_worker_pool"] @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - cloudbuild.WorkerPool]: + def update_worker_pool( + self, + ) -> Callable[[cloudbuild.UpdateWorkerPoolRequest], cloudbuild.WorkerPool]: r"""Return a callable for the update worker pool method over gRPC. Update a ``WorkerPool``. @@ -658,18 +669,20 @@ def update_worker_pool(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_worker_pool' not in self._stubs: - self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', + if "update_worker_pool" not in self._stubs: + self._stubs["update_worker_pool"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool", request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs['update_worker_pool'] + return self._stubs["update_worker_pool"] @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - cloudbuild.ListWorkerPoolsResponse]: + def list_worker_pools( + self, + ) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], cloudbuild.ListWorkerPoolsResponse + ]: r"""Return a callable for the list worker pools method over gRPC. List project's ``WorkerPools``. @@ -686,15 +699,13 @@ def list_worker_pools(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_worker_pools' not in self._stubs: - self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', + if "list_worker_pools" not in self._stubs: + self._stubs["list_worker_pools"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools", request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, ) - return self._stubs['list_worker_pools'] + return self._stubs["list_worker_pools"] -__all__ = ( - 'CloudBuildGrpcTransport', -) +__all__ = ("CloudBuildGrpcTransport",) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py index 98100d53..817e30a5 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -17,12 +17,12 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.devtools.cloudbuild_v1.types import cloudbuild @@ -57,13 +57,15 @@ class CloudBuildGrpcAsyncIOTransport(CloudBuildTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'cloudbuild.googleapis.com', - credentials: credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "cloudbuild.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: address (Optional[str]): The host for the channel to use. @@ -92,19 +94,21 @@ def create_channel(cls, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'cloudbuild.googleapis.com', - credentials: credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudbuild.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + ) -> None: """Instantiate the transport. Args: @@ -148,7 +152,11 @@ def __init__(self, *, # If a channel was explicitly provided, set it. 
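# A sketch of driving the AsyncIO transport reshaped in this file; it
# assumes a running event loop and ambient (ADC) credentials, neither of
# which this patch provides.
import asyncio
from google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports import (
    CloudBuildGrpcAsyncIOTransport,
)

async def main():
    channel = CloudBuildGrpcAsyncIOTransport.create_channel(
        "cloudbuild.googleapis.com"
    )
    transport = CloudBuildGrpcAsyncIOTransport(channel=channel)
    # Method stubs hang off properties such as transport.get_build and
    # return awaitables, matching the Awaitable[...] annotations below.
    await channel.close()

asyncio.run(main())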
self._grpc_channel = channel elif api_mtls_endpoint: - host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) # Create SSL credentials with client_cert_source or application # default SSL credentials. @@ -190,10 +198,9 @@ def grpc_channel(self) -> aio.Channel: """ # Sanity check: Only create a new channel if we do not already # have one. - if not hasattr(self, '_grpc_channel'): + if not hasattr(self, "_grpc_channel"): self._grpc_channel = self.create_channel( - self._host, - credentials=self._credentials, + self._host, credentials=self._credentials, ) # Return the channel from cache. @@ -207,18 +214,18 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if 'operations_client' not in self.__dict__: - self.__dict__['operations_client'] = operations_v1.OperationsAsyncClient( + if "operations_client" not in self.__dict__: + self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient( self.grpc_channel ) # Return the client from cache. - return self.__dict__['operations_client'] + return self.__dict__["operations_client"] @property - def create_build(self) -> Callable[ - [cloudbuild.CreateBuildRequest], - Awaitable[operations.Operation]]: + def create_build( + self, + ) -> Callable[[cloudbuild.CreateBuildRequest], Awaitable[operations.Operation]]: r"""Return a callable for the create build method over gRPC. Starts a build with the specified configuration. @@ -237,18 +244,18 @@ def create_build(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_build' not in self._stubs: - self._stubs['create_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild', + if "create_build" not in self._stubs: + self._stubs["create_build"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild", request_serializer=cloudbuild.CreateBuildRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs['create_build'] + return self._stubs["create_build"] @property - def get_build(self) -> Callable[ - [cloudbuild.GetBuildRequest], - Awaitable[cloudbuild.Build]]: + def get_build( + self, + ) -> Callable[[cloudbuild.GetBuildRequest], Awaitable[cloudbuild.Build]]: r"""Return a callable for the get build method over gRPC. Returns information about a previously requested build. @@ -267,18 +274,20 @@ def get_build(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_build' not in self._stubs: - self._stubs['get_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuild', + if "get_build" not in self._stubs: + self._stubs["get_build"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetBuild", request_serializer=cloudbuild.GetBuildRequest.serialize, response_deserializer=cloudbuild.Build.deserialize, ) - return self._stubs['get_build'] + return self._stubs["get_build"] @property - def list_builds(self) -> Callable[ - [cloudbuild.ListBuildsRequest], - Awaitable[cloudbuild.ListBuildsResponse]]: + def list_builds( + self, + ) -> Callable[ + [cloudbuild.ListBuildsRequest], Awaitable[cloudbuild.ListBuildsResponse] + ]: r"""Return a callable for the list builds method over gRPC. Lists previously requested builds. @@ -295,18 +304,18 @@ def list_builds(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_builds' not in self._stubs: - self._stubs['list_builds'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds', + if "list_builds" not in self._stubs: + self._stubs["list_builds"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds", request_serializer=cloudbuild.ListBuildsRequest.serialize, response_deserializer=cloudbuild.ListBuildsResponse.deserialize, ) - return self._stubs['list_builds'] + return self._stubs["list_builds"] @property - def cancel_build(self) -> Callable[ - [cloudbuild.CancelBuildRequest], - Awaitable[cloudbuild.Build]]: + def cancel_build( + self, + ) -> Callable[[cloudbuild.CancelBuildRequest], Awaitable[cloudbuild.Build]]: r"""Return a callable for the cancel build method over gRPC. Cancels a build in progress. @@ -321,18 +330,18 @@ def cancel_build(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'cancel_build' not in self._stubs: - self._stubs['cancel_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild', + if "cancel_build" not in self._stubs: + self._stubs["cancel_build"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild", request_serializer=cloudbuild.CancelBuildRequest.serialize, response_deserializer=cloudbuild.Build.deserialize, ) - return self._stubs['cancel_build'] + return self._stubs["cancel_build"] @property - def retry_build(self) -> Callable[ - [cloudbuild.RetryBuildRequest], - Awaitable[operations.Operation]]: + def retry_build( + self, + ) -> Callable[[cloudbuild.RetryBuildRequest], Awaitable[operations.Operation]]: r"""Return a callable for the retry build method over gRPC. Creates a new build based on the specified build. @@ -376,18 +385,20 @@ def retry_build(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'retry_build' not in self._stubs: - self._stubs['retry_build'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild', + if "retry_build" not in self._stubs: + self._stubs["retry_build"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild", request_serializer=cloudbuild.RetryBuildRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs['retry_build'] + return self._stubs["retry_build"] @property - def create_build_trigger(self) -> Callable[ - [cloudbuild.CreateBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: + def create_build_trigger( + self, + ) -> Callable[ + [cloudbuild.CreateBuildTriggerRequest], Awaitable[cloudbuild.BuildTrigger] + ]: r"""Return a callable for the create build trigger method over gRPC. Creates a new ``BuildTrigger``. @@ -404,18 +415,20 @@ def create_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_build_trigger' not in self._stubs: - self._stubs['create_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger', + if "create_build_trigger" not in self._stubs: + self._stubs["create_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger", request_serializer=cloudbuild.CreateBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs['create_build_trigger'] + return self._stubs["create_build_trigger"] @property - def get_build_trigger(self) -> Callable[ - [cloudbuild.GetBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: + def get_build_trigger( + self, + ) -> Callable[ + [cloudbuild.GetBuildTriggerRequest], Awaitable[cloudbuild.BuildTrigger] + ]: r"""Return a callable for the get build trigger method over gRPC. Returns information about a ``BuildTrigger``. @@ -432,18 +445,21 @@ def get_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_build_trigger' not in self._stubs: - self._stubs['get_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger', + if "get_build_trigger" not in self._stubs: + self._stubs["get_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger", request_serializer=cloudbuild.GetBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs['get_build_trigger'] + return self._stubs["get_build_trigger"] @property - def list_build_triggers(self) -> Callable[ - [cloudbuild.ListBuildTriggersRequest], - Awaitable[cloudbuild.ListBuildTriggersResponse]]: + def list_build_triggers( + self, + ) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], + Awaitable[cloudbuild.ListBuildTriggersResponse], + ]: r"""Return a callable for the list build triggers method over gRPC. Lists existing ``BuildTrigger``\ s. @@ -460,18 +476,18 @@ def list_build_triggers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_build_triggers' not in self._stubs: - self._stubs['list_build_triggers'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers', + if "list_build_triggers" not in self._stubs: + self._stubs["list_build_triggers"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers", request_serializer=cloudbuild.ListBuildTriggersRequest.serialize, response_deserializer=cloudbuild.ListBuildTriggersResponse.deserialize, ) - return self._stubs['list_build_triggers'] + return self._stubs["list_build_triggers"] @property - def delete_build_trigger(self) -> Callable[ - [cloudbuild.DeleteBuildTriggerRequest], - Awaitable[empty.Empty]]: + def delete_build_trigger( + self, + ) -> Callable[[cloudbuild.DeleteBuildTriggerRequest], Awaitable[empty.Empty]]: r"""Return a callable for the delete build trigger method over gRPC. Deletes a ``BuildTrigger`` by its project ID and trigger ID. @@ -488,18 +504,20 @@ def delete_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_build_trigger' not in self._stubs: - self._stubs['delete_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger', + if "delete_build_trigger" not in self._stubs: + self._stubs["delete_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger", request_serializer=cloudbuild.DeleteBuildTriggerRequest.serialize, response_deserializer=empty.Empty.FromString, ) - return self._stubs['delete_build_trigger'] + return self._stubs["delete_build_trigger"] @property - def update_build_trigger(self) -> Callable[ - [cloudbuild.UpdateBuildTriggerRequest], - Awaitable[cloudbuild.BuildTrigger]]: + def update_build_trigger( + self, + ) -> Callable[ + [cloudbuild.UpdateBuildTriggerRequest], Awaitable[cloudbuild.BuildTrigger] + ]: r"""Return a callable for the update build trigger method over gRPC. Updates a ``BuildTrigger`` by its project ID and trigger ID. @@ -516,18 +534,18 @@ def update_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_build_trigger' not in self._stubs: - self._stubs['update_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger', + if "update_build_trigger" not in self._stubs: + self._stubs["update_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger", request_serializer=cloudbuild.UpdateBuildTriggerRequest.serialize, response_deserializer=cloudbuild.BuildTrigger.deserialize, ) - return self._stubs['update_build_trigger'] + return self._stubs["update_build_trigger"] @property - def run_build_trigger(self) -> Callable[ - [cloudbuild.RunBuildTriggerRequest], - Awaitable[operations.Operation]]: + def run_build_trigger( + self, + ) -> Callable[[cloudbuild.RunBuildTriggerRequest], Awaitable[operations.Operation]]: r"""Return a callable for the run build trigger method over gRPC. Runs a ``BuildTrigger`` at a particular source revision. @@ -542,18 +560,20 @@ def run_build_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
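# Every registration above follows gRPC's "/<proto package>.<Service>/<Method>"
# path convention; a tiny helper makes the pattern explicit (illustrative,
# not part of the generated transport).
def _method_path(package: str, service: str, method: str) -> str:
    return "/{}.{}/{}".format(package, service, method)


assert (
    _method_path("google.devtools.cloudbuild.v1", "CloudBuild", "RunBuildTrigger")
    == "/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger"
)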
- if 'run_build_trigger' not in self._stubs: - self._stubs['run_build_trigger'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger', + if "run_build_trigger" not in self._stubs: + self._stubs["run_build_trigger"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger", request_serializer=cloudbuild.RunBuildTriggerRequest.serialize, response_deserializer=operations.Operation.FromString, ) - return self._stubs['run_build_trigger'] + return self._stubs["run_build_trigger"] @property - def create_worker_pool(self) -> Callable[ - [cloudbuild.CreateWorkerPoolRequest], - Awaitable[cloudbuild.WorkerPool]]: + def create_worker_pool( + self, + ) -> Callable[ + [cloudbuild.CreateWorkerPoolRequest], Awaitable[cloudbuild.WorkerPool] + ]: r"""Return a callable for the create worker pool method over gRPC. Creates a ``WorkerPool`` to run the builds, and returns the new @@ -571,18 +591,18 @@ def create_worker_pool(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_worker_pool' not in self._stubs: - self._stubs['create_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool', + if "create_worker_pool" not in self._stubs: + self._stubs["create_worker_pool"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool", request_serializer=cloudbuild.CreateWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs['create_worker_pool'] + return self._stubs["create_worker_pool"] @property - def get_worker_pool(self) -> Callable[ - [cloudbuild.GetWorkerPoolRequest], - Awaitable[cloudbuild.WorkerPool]]: + def get_worker_pool( + self, + ) -> Callable[[cloudbuild.GetWorkerPoolRequest], Awaitable[cloudbuild.WorkerPool]]: r"""Return a callable for the get worker pool method over gRPC. Returns information about a ``WorkerPool``. @@ -599,18 +619,18 @@ def get_worker_pool(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_worker_pool' not in self._stubs: - self._stubs['get_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool', + if "get_worker_pool" not in self._stubs: + self._stubs["get_worker_pool"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool", request_serializer=cloudbuild.GetWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs['get_worker_pool'] + return self._stubs["get_worker_pool"] @property - def delete_worker_pool(self) -> Callable[ - [cloudbuild.DeleteWorkerPoolRequest], - Awaitable[empty.Empty]]: + def delete_worker_pool( + self, + ) -> Callable[[cloudbuild.DeleteWorkerPoolRequest], Awaitable[empty.Empty]]: r"""Return a callable for the delete worker pool method over gRPC. Deletes a ``WorkerPool`` by its project ID and WorkerPool name. @@ -627,18 +647,20 @@ def delete_worker_pool(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_worker_pool' not in self._stubs: - self._stubs['delete_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool', + if "delete_worker_pool" not in self._stubs: + self._stubs["delete_worker_pool"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool", request_serializer=cloudbuild.DeleteWorkerPoolRequest.serialize, response_deserializer=empty.Empty.FromString, ) - return self._stubs['delete_worker_pool'] + return self._stubs["delete_worker_pool"] @property - def update_worker_pool(self) -> Callable[ - [cloudbuild.UpdateWorkerPoolRequest], - Awaitable[cloudbuild.WorkerPool]]: + def update_worker_pool( + self, + ) -> Callable[ + [cloudbuild.UpdateWorkerPoolRequest], Awaitable[cloudbuild.WorkerPool] + ]: r"""Return a callable for the update worker pool method over gRPC. Update a ``WorkerPool``. @@ -655,18 +677,21 @@ def update_worker_pool(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_worker_pool' not in self._stubs: - self._stubs['update_worker_pool'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool', + if "update_worker_pool" not in self._stubs: + self._stubs["update_worker_pool"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool", request_serializer=cloudbuild.UpdateWorkerPoolRequest.serialize, response_deserializer=cloudbuild.WorkerPool.deserialize, ) - return self._stubs['update_worker_pool'] + return self._stubs["update_worker_pool"] @property - def list_worker_pools(self) -> Callable[ - [cloudbuild.ListWorkerPoolsRequest], - Awaitable[cloudbuild.ListWorkerPoolsResponse]]: + def list_worker_pools( + self, + ) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], + Awaitable[cloudbuild.ListWorkerPoolsResponse], + ]: r"""Return a callable for the list worker pools method over gRPC. List project's ``WorkerPools``. @@ -683,15 +708,13 @@ def list_worker_pools(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_worker_pools' not in self._stubs: - self._stubs['list_worker_pools'] = self.grpc_channel.unary_unary( - '/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools', + if "list_worker_pools" not in self._stubs: + self._stubs["list_worker_pools"] = self.grpc_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools", request_serializer=cloudbuild.ListWorkerPoolsRequest.serialize, response_deserializer=cloudbuild.ListWorkerPoolsResponse.deserialize, ) - return self._stubs['list_worker_pools'] + return self._stubs["list_worker_pools"] -__all__ = ( - 'CloudBuildGrpcAsyncIOTransport', -) +__all__ = ("CloudBuildGrpcAsyncIOTransport",) diff --git a/google/cloud/devtools/cloudbuild_v1/types/__init__.py b/google/cloud/devtools/cloudbuild_v1/types/__init__.py index 39596371..c9fc9fed 100644 --- a/google/cloud/devtools/cloudbuild_v1/types/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/types/__init__.py @@ -15,51 +15,95 @@ # limitations under the License. 
# -from .cloudbuild import (RetryBuildRequest, RunBuildTriggerRequest, StorageSource, RepoSource, Source, BuiltImage, BuildStep, Volume, Results, ArtifactResult, Build, Artifacts, TimeSpan, BuildOperationMetadata, SourceProvenance, FileHashes, Hash, Secret, CreateBuildRequest, GetBuildRequest, ListBuildsRequest, ListBuildsResponse, CancelBuildRequest, BuildTrigger, GitHubEventsConfig, PullRequestFilter, PushFilter, CreateBuildTriggerRequest, GetBuildTriggerRequest, ListBuildTriggersRequest, ListBuildTriggersResponse, DeleteBuildTriggerRequest, UpdateBuildTriggerRequest, BuildOptions, WorkerPool, WorkerConfig, Network, CreateWorkerPoolRequest, GetWorkerPoolRequest, DeleteWorkerPoolRequest, UpdateWorkerPoolRequest, ListWorkerPoolsRequest, ListWorkerPoolsResponse, ) +from .cloudbuild import ( + RetryBuildRequest, + RunBuildTriggerRequest, + StorageSource, + RepoSource, + Source, + BuiltImage, + BuildStep, + Volume, + Results, + ArtifactResult, + Build, + Artifacts, + TimeSpan, + BuildOperationMetadata, + SourceProvenance, + FileHashes, + Hash, + Secret, + CreateBuildRequest, + GetBuildRequest, + ListBuildsRequest, + ListBuildsResponse, + CancelBuildRequest, + BuildTrigger, + GitHubEventsConfig, + PullRequestFilter, + PushFilter, + CreateBuildTriggerRequest, + GetBuildTriggerRequest, + ListBuildTriggersRequest, + ListBuildTriggersResponse, + DeleteBuildTriggerRequest, + UpdateBuildTriggerRequest, + BuildOptions, + WorkerPool, + WorkerConfig, + Network, + CreateWorkerPoolRequest, + GetWorkerPoolRequest, + DeleteWorkerPoolRequest, + UpdateWorkerPoolRequest, + ListWorkerPoolsRequest, + ListWorkerPoolsResponse, +) __all__ = ( - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'StorageSource', - 'RepoSource', - 'Source', - 'BuiltImage', - 'BuildStep', - 'Volume', - 'Results', - 'ArtifactResult', - 'Build', - 'Artifacts', - 'TimeSpan', - 'BuildOperationMetadata', - 'SourceProvenance', - 'FileHashes', - 'Hash', - 'Secret', - 'CreateBuildRequest', - 'GetBuildRequest', - 'ListBuildsRequest', - 'ListBuildsResponse', - 'CancelBuildRequest', - 'BuildTrigger', - 'GitHubEventsConfig', - 'PullRequestFilter', - 'PushFilter', - 'CreateBuildTriggerRequest', - 'GetBuildTriggerRequest', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'DeleteBuildTriggerRequest', - 'UpdateBuildTriggerRequest', - 'BuildOptions', - 'WorkerPool', - 'WorkerConfig', - 'Network', - 'CreateWorkerPoolRequest', - 'GetWorkerPoolRequest', - 'DeleteWorkerPoolRequest', - 'UpdateWorkerPoolRequest', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', + "RetryBuildRequest", + "RunBuildTriggerRequest", + "StorageSource", + "RepoSource", + "Source", + "BuiltImage", + "BuildStep", + "Volume", + "Results", + "ArtifactResult", + "Build", + "Artifacts", + "TimeSpan", + "BuildOperationMetadata", + "SourceProvenance", + "FileHashes", + "Hash", + "Secret", + "CreateBuildRequest", + "GetBuildRequest", + "ListBuildsRequest", + "ListBuildsResponse", + "CancelBuildRequest", + "BuildTrigger", + "GitHubEventsConfig", + "PullRequestFilter", + "PushFilter", + "CreateBuildTriggerRequest", + "GetBuildTriggerRequest", + "ListBuildTriggersRequest", + "ListBuildTriggersResponse", + "DeleteBuildTriggerRequest", + "UpdateBuildTriggerRequest", + "BuildOptions", + "WorkerPool", + "WorkerConfig", + "Network", + "CreateWorkerPoolRequest", + "GetWorkerPoolRequest", + "DeleteWorkerPoolRequest", + "UpdateWorkerPoolRequest", + "ListWorkerPoolsRequest", + "ListWorkerPoolsResponse", ) diff --git a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py 
b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index d76531e4..b7f5dbbd 100644 --- a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -23,51 +23,51 @@ __protobuf__ = proto.module( - package='google.devtools.cloudbuild.v1', + package="google.devtools.cloudbuild.v1", manifest={ - 'RetryBuildRequest', - 'RunBuildTriggerRequest', - 'StorageSource', - 'RepoSource', - 'Source', - 'BuiltImage', - 'BuildStep', - 'Volume', - 'Results', - 'ArtifactResult', - 'Build', - 'Artifacts', - 'TimeSpan', - 'BuildOperationMetadata', - 'SourceProvenance', - 'FileHashes', - 'Hash', - 'Secret', - 'CreateBuildRequest', - 'GetBuildRequest', - 'ListBuildsRequest', - 'ListBuildsResponse', - 'CancelBuildRequest', - 'BuildTrigger', - 'GitHubEventsConfig', - 'PullRequestFilter', - 'PushFilter', - 'CreateBuildTriggerRequest', - 'GetBuildTriggerRequest', - 'ListBuildTriggersRequest', - 'ListBuildTriggersResponse', - 'DeleteBuildTriggerRequest', - 'UpdateBuildTriggerRequest', - 'BuildOptions', - 'WorkerPool', - 'WorkerConfig', - 'Network', - 'CreateWorkerPoolRequest', - 'GetWorkerPoolRequest', - 'DeleteWorkerPoolRequest', - 'UpdateWorkerPoolRequest', - 'ListWorkerPoolsRequest', - 'ListWorkerPoolsResponse', + "RetryBuildRequest", + "RunBuildTriggerRequest", + "StorageSource", + "RepoSource", + "Source", + "BuiltImage", + "BuildStep", + "Volume", + "Results", + "ArtifactResult", + "Build", + "Artifacts", + "TimeSpan", + "BuildOperationMetadata", + "SourceProvenance", + "FileHashes", + "Hash", + "Secret", + "CreateBuildRequest", + "GetBuildRequest", + "ListBuildsRequest", + "ListBuildsResponse", + "CancelBuildRequest", + "BuildTrigger", + "GitHubEventsConfig", + "PullRequestFilter", + "PushFilter", + "CreateBuildTriggerRequest", + "GetBuildTriggerRequest", + "ListBuildTriggersRequest", + "ListBuildTriggersResponse", + "DeleteBuildTriggerRequest", + "UpdateBuildTriggerRequest", + "BuildOptions", + "WorkerPool", + "WorkerConfig", + "Network", + "CreateWorkerPoolRequest", + "GetWorkerPoolRequest", + "DeleteWorkerPoolRequest", + "UpdateWorkerPoolRequest", + "ListWorkerPoolsRequest", + "ListWorkerPoolsResponse", }, ) @@ -104,9 +104,7 @@ class RunBuildTriggerRequest(proto.Message): trigger_id = proto.Field(proto.STRING, number=2) - source = proto.Field(proto.MESSAGE, number=3, - message='RepoSource', - ) + source = proto.Field(proto.MESSAGE, number=3, message="RepoSource",) class StorageSource(proto.Message): @@ -178,11 +176,11 @@ class RepoSource(proto.Message): repo_name = proto.Field(proto.STRING, number=2) - branch_name = proto.Field(proto.STRING, number=3, oneof='revision') + branch_name = proto.Field(proto.STRING, number=3, oneof="revision") - tag_name = proto.Field(proto.STRING, number=4, oneof='revision') + tag_name = proto.Field(proto.STRING, number=4, oneof="revision") - commit_sha = proto.Field(proto.STRING, number=5, oneof='revision') + commit_sha = proto.Field(proto.STRING, number=5, oneof="revision") dir = proto.Field(proto.STRING, number=7) @@ -203,12 +201,12 @@ class Source(proto.Message): location in a Cloud Source Repository. 
""" - storage_source = proto.Field(proto.MESSAGE, number=2, oneof='source', - message=StorageSource, + storage_source = proto.Field( + proto.MESSAGE, number=2, oneof="source", message=StorageSource, ) - repo_source = proto.Field(proto.MESSAGE, number=3, oneof='source', - message=RepoSource, + repo_source = proto.Field( + proto.MESSAGE, number=3, oneof="source", message=RepoSource, ) @@ -230,9 +228,7 @@ class BuiltImage(proto.Message): digest = proto.Field(proto.STRING, number=3) - push_timing = proto.Field(proto.MESSAGE, number=4, - message='TimeSpan', - ) + push_timing = proto.Field(proto.MESSAGE, number=4, message="TimeSpan",) class BuildStep(proto.Message): @@ -347,25 +343,15 @@ class BuildStep(proto.Message): secret_env = proto.RepeatedField(proto.STRING, number=8) - volumes = proto.RepeatedField(proto.MESSAGE, number=9, - message='Volume', - ) + volumes = proto.RepeatedField(proto.MESSAGE, number=9, message="Volume",) - timing = proto.Field(proto.MESSAGE, number=10, - message='TimeSpan', - ) + timing = proto.Field(proto.MESSAGE, number=10, message="TimeSpan",) - pull_timing = proto.Field(proto.MESSAGE, number=13, - message='TimeSpan', - ) + pull_timing = proto.Field(proto.MESSAGE, number=13, message="TimeSpan",) - timeout = proto.Field(proto.MESSAGE, number=11, - message=duration.Duration, - ) + timeout = proto.Field(proto.MESSAGE, number=11, message=duration.Duration,) - status = proto.Field(proto.ENUM, number=12, - enum='Build.Status', - ) + status = proto.Field(proto.ENUM, number=12, enum="Build.Status",) class Volume(proto.Message): @@ -421,9 +407,7 @@ class Results(proto.Message): Time to push all non-container artifacts. """ - images = proto.RepeatedField(proto.MESSAGE, number=2, - message=BuiltImage, - ) + images = proto.RepeatedField(proto.MESSAGE, number=2, message=BuiltImage,) build_step_images = proto.RepeatedField(proto.STRING, number=3) @@ -433,9 +417,7 @@ class Results(proto.Message): build_step_outputs = proto.RepeatedField(proto.BYTES, number=6) - artifact_timing = proto.Field(proto.MESSAGE, number=7, - message='TimeSpan', - ) + artifact_timing = proto.Field(proto.MESSAGE, number=7, message="TimeSpan",) class ArtifactResult(proto.Message): @@ -453,9 +435,7 @@ class ArtifactResult(proto.Message): location = proto.Field(proto.STRING, number=1) - file_hash = proto.RepeatedField(proto.MESSAGE, number=2, - message='FileHashes', - ) + file_hash = proto.RepeatedField(proto.MESSAGE, number=2, message="FileHashes",) class Build(proto.Message): @@ -571,6 +551,7 @@ class Build(proto.Message): If the build does not specify source or images, these keys will not be included. 
""" + class Status(proto.Enum): r"""Possible status of a build or build step.""" STATUS_UNKNOWN = 0 @@ -587,61 +568,39 @@ class Status(proto.Enum): project_id = proto.Field(proto.STRING, number=16) - status = proto.Field(proto.ENUM, number=2, - enum=Status, - ) + status = proto.Field(proto.ENUM, number=2, enum=Status,) status_detail = proto.Field(proto.STRING, number=24) - source = proto.Field(proto.MESSAGE, number=3, - message=Source, - ) + source = proto.Field(proto.MESSAGE, number=3, message=Source,) - steps = proto.RepeatedField(proto.MESSAGE, number=11, - message=BuildStep, - ) + steps = proto.RepeatedField(proto.MESSAGE, number=11, message=BuildStep,) - results = proto.Field(proto.MESSAGE, number=10, - message=Results, - ) + results = proto.Field(proto.MESSAGE, number=10, message=Results,) - create_time = proto.Field(proto.MESSAGE, number=6, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) - start_time = proto.Field(proto.MESSAGE, number=7, - message=timestamp.Timestamp, - ) + start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) - finish_time = proto.Field(proto.MESSAGE, number=8, - message=timestamp.Timestamp, - ) + finish_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) - timeout = proto.Field(proto.MESSAGE, number=12, - message=duration.Duration, - ) + timeout = proto.Field(proto.MESSAGE, number=12, message=duration.Duration,) images = proto.RepeatedField(proto.STRING, number=13) - queue_ttl = proto.Field(proto.MESSAGE, number=40, - message=duration.Duration, - ) + queue_ttl = proto.Field(proto.MESSAGE, number=40, message=duration.Duration,) - artifacts = proto.Field(proto.MESSAGE, number=37, - message='Artifacts', - ) + artifacts = proto.Field(proto.MESSAGE, number=37, message="Artifacts",) logs_bucket = proto.Field(proto.STRING, number=19) - source_provenance = proto.Field(proto.MESSAGE, number=21, - message='SourceProvenance', + source_provenance = proto.Field( + proto.MESSAGE, number=21, message="SourceProvenance", ) build_trigger_id = proto.Field(proto.STRING, number=22) - options = proto.Field(proto.MESSAGE, number=23, - message='BuildOptions', - ) + options = proto.Field(proto.MESSAGE, number=23, message="BuildOptions",) log_url = proto.Field(proto.STRING, number=25) @@ -649,13 +608,9 @@ class Status(proto.Enum): tags = proto.RepeatedField(proto.STRING, number=31) - secrets = proto.RepeatedField(proto.MESSAGE, number=32, - message='Secret', - ) + secrets = proto.RepeatedField(proto.MESSAGE, number=32, message="Secret",) - timing = proto.MapField(proto.STRING, proto.MESSAGE, number=33, - message='TimeSpan', - ) + timing = proto.MapField(proto.STRING, proto.MESSAGE, number=33, message="TimeSpan",) class Artifacts(proto.Message): @@ -690,6 +645,7 @@ class Artifacts(proto.Message): If any objects fail to be pushed, the build is marked FAILURE. """ + class ArtifactObjects(proto.Message): r"""Files in the workspace to upload to Cloud Storage upon successful completion of all build steps. 
@@ -714,15 +670,11 @@ class ArtifactObjects(proto.Message): paths = proto.RepeatedField(proto.STRING, number=2) - timing = proto.Field(proto.MESSAGE, number=3, - message='TimeSpan', - ) + timing = proto.Field(proto.MESSAGE, number=3, message="TimeSpan",) images = proto.RepeatedField(proto.STRING, number=1) - objects = proto.Field(proto.MESSAGE, number=2, - message=ArtifactObjects, - ) + objects = proto.Field(proto.MESSAGE, number=2, message=ArtifactObjects,) class TimeSpan(proto.Message): @@ -735,13 +687,9 @@ class TimeSpan(proto.Message): End of time span. """ - start_time = proto.Field(proto.MESSAGE, number=1, - message=timestamp.Timestamp, - ) + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, - message=timestamp.Timestamp, - ) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) class BuildOperationMetadata(proto.Message): @@ -752,9 +700,7 @@ class BuildOperationMetadata(proto.Message): The build that the operation is tracking. """ - build = proto.Field(proto.MESSAGE, number=1, - message=Build, - ) + build = proto.Field(proto.MESSAGE, number=1, message=Build,) class SourceProvenance(proto.Message): @@ -783,16 +729,14 @@ class SourceProvenance(proto.Message): the single path to that file. """ - resolved_storage_source = proto.Field(proto.MESSAGE, number=3, - message=StorageSource, + resolved_storage_source = proto.Field( + proto.MESSAGE, number=3, message=StorageSource, ) - resolved_repo_source = proto.Field(proto.MESSAGE, number=6, - message=RepoSource, - ) + resolved_repo_source = proto.Field(proto.MESSAGE, number=6, message=RepoSource,) - file_hashes = proto.MapField(proto.STRING, proto.MESSAGE, number=4, - message='FileHashes', + file_hashes = proto.MapField( + proto.STRING, proto.MESSAGE, number=4, message="FileHashes", ) @@ -806,9 +750,7 @@ class FileHashes(proto.Message): Collection of file hashes. """ - file_hash = proto.RepeatedField(proto.MESSAGE, number=1, - message='Hash', - ) + file_hash = proto.RepeatedField(proto.MESSAGE, number=1, message="Hash",) class Hash(proto.Message): @@ -820,15 +762,14 @@ class Hash(proto.Message): value (bytes): The hash value. 
""" + class HashType(proto.Enum): r"""Specifies the hash algorithm, if any.""" NONE = 0 SHA256 = 1 MD5 = 2 - type = proto.Field(proto.ENUM, number=1, - enum=HashType, - ) + type = proto.Field(proto.ENUM, number=1, enum=HashType,) value = proto.Field(proto.BYTES, number=2) @@ -869,9 +810,7 @@ class CreateBuildRequest(proto.Message): project_id = proto.Field(proto.STRING, number=1) - build = proto.Field(proto.MESSAGE, number=2, - message=Build, - ) + build = proto.Field(proto.MESSAGE, number=2, message=Build,) class GetBuildRequest(proto.Message): @@ -927,9 +866,7 @@ class ListBuildsResponse(proto.Message): def raw_page(self): return self - builds = proto.RepeatedField(proto.MESSAGE, number=1, - message=Build, - ) + builds = proto.RepeatedField(proto.MESSAGE, number=1, message=Build,) next_page_token = proto.Field(proto.STRING, number=2) @@ -1030,23 +967,15 @@ class BuildTrigger(proto.Message): tags = proto.RepeatedField(proto.STRING, number=19) - trigger_template = proto.Field(proto.MESSAGE, number=7, - message=RepoSource, - ) + trigger_template = proto.Field(proto.MESSAGE, number=7, message=RepoSource,) - github = proto.Field(proto.MESSAGE, number=13, - message='GitHubEventsConfig', - ) + github = proto.Field(proto.MESSAGE, number=13, message="GitHubEventsConfig",) - build = proto.Field(proto.MESSAGE, number=4, oneof='build_template', - message=Build, - ) + build = proto.Field(proto.MESSAGE, number=4, oneof="build_template", message=Build,) - filename = proto.Field(proto.STRING, number=8, oneof='build_template') + filename = proto.Field(proto.STRING, number=8, oneof="build_template") - create_time = proto.Field(proto.MESSAGE, number=5, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) disabled = proto.Field(proto.BOOL, number=9) @@ -1089,13 +1018,11 @@ class GitHubEventsConfig(proto.Message): name = proto.Field(proto.STRING, number=7) - pull_request = proto.Field(proto.MESSAGE, number=4, oneof='event', - message='PullRequestFilter', + pull_request = proto.Field( + proto.MESSAGE, number=4, oneof="event", message="PullRequestFilter", ) - push = proto.Field(proto.MESSAGE, number=5, oneof='event', - message='PushFilter', - ) + push = proto.Field(proto.MESSAGE, number=5, oneof="event", message="PushFilter",) class PullRequestFilter(proto.Message): @@ -1115,16 +1042,15 @@ class PullRequestFilter(proto.Message): If true, branches that do NOT match the git_ref will trigger a build. """ + class CommentControl(proto.Enum): r"""Controls behavior of Pull Request comments.""" COMMENTS_DISABLED = 0 COMMENTS_ENABLED = 1 - branch = proto.Field(proto.STRING, number=2, oneof='git_ref') + branch = proto.Field(proto.STRING, number=2, oneof="git_ref") - comment_control = proto.Field(proto.ENUM, number=5, - enum=CommentControl, - ) + comment_control = proto.Field(proto.ENUM, number=5, enum=CommentControl,) invert_regex = proto.Field(proto.BOOL, number=6) @@ -1149,9 +1075,9 @@ class PushFilter(proto.Message): NOT match the git_ref regex. 
""" - branch = proto.Field(proto.STRING, number=2, oneof='git_ref') + branch = proto.Field(proto.STRING, number=2, oneof="git_ref") - tag = proto.Field(proto.STRING, number=3, oneof='git_ref') + tag = proto.Field(proto.STRING, number=3, oneof="git_ref") invert_regex = proto.Field(proto.BOOL, number=4) @@ -1169,9 +1095,7 @@ class CreateBuildTriggerRequest(proto.Message): project_id = proto.Field(proto.STRING, number=1) - trigger = proto.Field(proto.MESSAGE, number=2, - message=BuildTrigger, - ) + trigger = proto.Field(proto.MESSAGE, number=2, message=BuildTrigger,) class GetBuildTriggerRequest(proto.Message): @@ -1227,9 +1151,7 @@ class ListBuildTriggersResponse(proto.Message): def raw_page(self): return self - triggers = proto.RepeatedField(proto.MESSAGE, number=1, - message=BuildTrigger, - ) + triggers = proto.RepeatedField(proto.MESSAGE, number=1, message=BuildTrigger,) next_page_token = proto.Field(proto.STRING, number=2) @@ -1267,9 +1189,7 @@ class UpdateBuildTriggerRequest(proto.Message): trigger_id = proto.Field(proto.STRING, number=2) - trigger = proto.Field(proto.MESSAGE, number=3, - message=BuildTrigger, - ) + trigger = proto.Field(proto.MESSAGE, number=3, message=BuildTrigger,) class BuildOptions(proto.Message): @@ -1334,6 +1254,7 @@ class BuildOptions(proto.Message): step is not valid as it is indicative of a build request with an incorrect configuration. """ + class VerifyOption(proto.Enum): r"""Specifies the manner in which the build should be verified, if at all. @@ -1368,41 +1289,29 @@ class LoggingMode(proto.Enum): LEGACY = 1 GCS_ONLY = 2 - source_provenance_hash = proto.RepeatedField(proto.ENUM, number=1, - enum=Hash.HashType, + source_provenance_hash = proto.RepeatedField( + proto.ENUM, number=1, enum=Hash.HashType, ) - requested_verify_option = proto.Field(proto.ENUM, number=2, - enum=VerifyOption, - ) + requested_verify_option = proto.Field(proto.ENUM, number=2, enum=VerifyOption,) - machine_type = proto.Field(proto.ENUM, number=3, - enum=MachineType, - ) + machine_type = proto.Field(proto.ENUM, number=3, enum=MachineType,) disk_size_gb = proto.Field(proto.INT64, number=6) - substitution_option = proto.Field(proto.ENUM, number=4, - enum=SubstitutionOption, - ) + substitution_option = proto.Field(proto.ENUM, number=4, enum=SubstitutionOption,) - log_streaming_option = proto.Field(proto.ENUM, number=5, - enum=LogStreamingOption, - ) + log_streaming_option = proto.Field(proto.ENUM, number=5, enum=LogStreamingOption,) worker_pool = proto.Field(proto.STRING, number=7) - logging = proto.Field(proto.ENUM, number=11, - enum=LoggingMode, - ) + logging = proto.Field(proto.ENUM, number=11, enum=LoggingMode,) env = proto.RepeatedField(proto.STRING, number=12) secret_env = proto.RepeatedField(proto.STRING, number=13) - volumes = proto.RepeatedField(proto.MESSAGE, number=14, - message=Volume, - ) + volumes = proto.RepeatedField(proto.MESSAGE, number=14, message=Volume,) class WorkerPool(proto.Message): @@ -1449,6 +1358,7 @@ class WorkerPool(proto.Message): status (~.cloudbuild.WorkerPool.Status): Output only. WorkerPool Status. 
""" + class Region(proto.Enum): r"""Supported GCP regions to create the ``WorkerPool``.""" REGION_UNSPECIFIED = 0 @@ -1473,29 +1383,17 @@ class Status(proto.Enum): worker_count = proto.Field(proto.INT64, number=4) - worker_config = proto.Field(proto.MESSAGE, number=16, - message='WorkerConfig', - ) + worker_config = proto.Field(proto.MESSAGE, number=16, message="WorkerConfig",) - regions = proto.RepeatedField(proto.ENUM, number=9, - enum=Region, - ) + regions = proto.RepeatedField(proto.ENUM, number=9, enum=Region,) - create_time = proto.Field(proto.MESSAGE, number=11, - message=timestamp.Timestamp, - ) + create_time = proto.Field(proto.MESSAGE, number=11, message=timestamp.Timestamp,) - update_time = proto.Field(proto.MESSAGE, number=17, - message=timestamp.Timestamp, - ) + update_time = proto.Field(proto.MESSAGE, number=17, message=timestamp.Timestamp,) - delete_time = proto.Field(proto.MESSAGE, number=12, - message=timestamp.Timestamp, - ) + delete_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,) - status = proto.Field(proto.ENUM, number=13, - enum=Status, - ) + status = proto.Field(proto.ENUM, number=13, enum=Status,) class WorkerConfig(proto.Message): @@ -1536,9 +1434,7 @@ class WorkerConfig(proto.Message): disk_size_gb = proto.Field(proto.INT64, number=2) - network = proto.Field(proto.MESSAGE, number=3, - message='Network', - ) + network = proto.Field(proto.MESSAGE, number=3, message="Network",) tag = proto.Field(proto.STRING, number=4) @@ -1581,9 +1477,7 @@ class CreateWorkerPoolRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - worker_pool = proto.Field(proto.MESSAGE, number=2, - message=WorkerPool, - ) + worker_pool = proto.Field(proto.MESSAGE, number=2, message=WorkerPool,) class GetWorkerPoolRequest(proto.Message): @@ -1626,9 +1520,7 @@ class UpdateWorkerPoolRequest(proto.Message): name = proto.Field(proto.STRING, number=2) - worker_pool = proto.Field(proto.MESSAGE, number=3, - message=WorkerPool, - ) + worker_pool = proto.Field(proto.MESSAGE, number=3, message=WorkerPool,) class ListWorkerPoolsRequest(proto.Message): @@ -1650,9 +1542,7 @@ class ListWorkerPoolsResponse(proto.Message): ``WorkerPools`` for the project. 
""" - worker_pools = proto.RepeatedField(proto.MESSAGE, number=1, - message=WorkerPool, - ) + worker_pools = proto.RepeatedField(proto.MESSAGE, number=1, message=WorkerPool,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/synth.metadata b/synth.metadata index 71a531e8..8a012673 100644 --- a/synth.metadata +++ b/synth.metadata @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f07cb4446192952f19be3056957f56d180586055" + "sha": "ee7506d15daa3873accfff9430eff7e3953f0248" } } ], diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index 0212ebba..44666a8e 100644 --- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -35,7 +35,9 @@ from google.api_core import operations_v1 from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildAsyncClient +from google.cloud.devtools.cloudbuild_v1.services.cloud_build import ( + CloudBuildAsyncClient, +) from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildClient from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports @@ -54,7 +56,11 @@ def client_cert_source_callback(): # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) def test__get_default_mtls_endpoint(): @@ -65,17 +71,30 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert CloudBuildClient._get_default_mtls_endpoint(None) is None - assert CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert CloudBuildClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi @pytest.mark.parametrize("client_class", [CloudBuildClient, CloudBuildAsyncClient]) def test_cloud_build_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client._transport._credentials == creds @@ -83,7 +102,7 @@ def test_cloud_build_client_from_service_account_file(client_class): client 
= client_class.from_service_account_json("dummy/file/path.json") assert client._transport._credentials == creds - assert client._transport._host == 'cloudbuild.googleapis.com:443' + assert client._transport._host == "cloudbuild.googleapis.com:443" def test_cloud_build_client_get_transport_class(): @@ -94,29 +113,42 @@ def test_cloud_build_client_get_transport_class(): assert transport == transports.CloudBuildGrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio") -]) -@mock.patch.object(CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient)) -@mock.patch.object(CloudBuildAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildAsyncClient)) -def test_cloud_build_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), + ( + CloudBuildAsyncClient, + transports.CloudBuildGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) +) +@mock.patch.object( + CloudBuildAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudBuildAsyncClient), +) +def test_cloud_build_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=credentials.AnonymousCredentials() - ) + with mock.patch.object(CloudBuildClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudBuildClient, 'get_transport_class') as gtc: + with mock.patch.object(CloudBuildClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -132,7 +164,7 @@ def test_cloud_build_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -148,7 +180,7 @@ def test_cloud_build_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -164,8 +196,10 @@ def test_cloud_build_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is # "auto", and client_cert_source is provided. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -176,14 +210,16 @@ def test_cloud_build_client_client_options(client_class, transport_class, transp api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, quota_project_id=None, - ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is # "auto", and default_client_cert_source is provided. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -199,8 +235,11 @@ def test_cloud_build_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is # "auto", but client_cert_source and default_client_cert_source are None. 
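# The environment-variable cases above and below pin down one resolution
# rule: "never" selects DEFAULT_ENDPOINT, "always" selects
# DEFAULT_MTLS_ENDPOINT, and "auto" selects the mTLS endpoint only when a
# client certificate source is available. A toy resolver, for illustration:
def _pick_endpoint(use_mtls, has_cert, plain, mtls):
    if use_mtls == "always" or (use_mtls == "auto" and has_cert):
        return mtls
    return plain


assert _pick_endpoint("never", True, "plain", "mtls") == "plain"
assert _pick_endpoint("always", False, "plain", "mtls") == "mtls"
assert _pick_endpoint("auto", False, "plain", "mtls") == "plain"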
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -221,7 +260,7 @@ def test_cloud_build_client_client_options(client_class, transport_class, transp # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -235,16 +274,23 @@ def test_cloud_build_client_client_options(client_class, transport_class, transp ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio") -]) -def test_cloud_build_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), + ( + CloudBuildAsyncClient, + transports.CloudBuildGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_build_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -258,16 +304,23 @@ def test_cloud_build_client_client_options_scopes(client_class, transport_class, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio") -]) -def test_cloud_build_client_client_options_credentials_file(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), + ( + CloudBuildAsyncClient, + transports.CloudBuildGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_build_client_client_options_credentials_file( + client_class, transport_class, transport_name +): # Check the case credentials file is provided. 
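# What this option looks like from the caller's side (a sketch;
# "credentials.json" is a placeholder path to a service-account file).
from google.api_core import client_options as client_options_lib
from google.cloud.devtools.cloudbuild_v1.services.cloud_build import CloudBuildClient

opts = client_options_lib.ClientOptions(credentials_file="credentials.json")
# client = CloudBuildClient(client_options=opts)  # loads credentials from the file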
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( @@ -282,11 +335,11 @@ def test_cloud_build_client_client_options_credentials_file(client_class, transp def test_cloud_build_client_client_options_from_dict(): - with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildGrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildGrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None - client = CloudBuildClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) + client = CloudBuildClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, @@ -298,10 +351,11 @@ def test_cloud_build_client_client_options_from_dict(): ) -def test_create_build(transport: str = 'grpc', request_type=cloudbuild.CreateBuildRequest): +def test_create_build( + transport: str = "grpc", request_type=cloudbuild.CreateBuildRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -309,11 +363,9 @@ def test_create_build(transport: str = 'grpc', request_type=cloudbuild.CreateBui request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.create_build), - '__call__') as call: + with mock.patch.object(type(client._transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_build(request) @@ -332,10 +384,9 @@ def test_create_build_from_dict(): @pytest.mark.asyncio -async def test_create_build_async(transport: str = 'grpc_asyncio'): +async def test_create_build_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -344,11 +395,11 @@ async def test_create_build_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build), - '__call__') as call: + type(client._client._transport.create_build), "__call__" + ) as call: # Designate an appropriate return value for the call. 
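# create_build is a long-running operation, so the canned return value is
# a google.longrunning Operation protobuf; the client wraps it in a future
# (see the future.Future assertions below). Minimal shape of that protobuf:
from google.longrunning import operations_pb2

_op = operations_pb2.Operation(name="operations/spam")
assert _op.name == "operations/spam" and not _op.done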
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_build(request) @@ -364,22 +415,17 @@ async def test_create_build_async(transport: str = 'grpc_asyncio'): def test_create_build_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.create_build), - '__call__') as call: + with mock.patch.object(type(client._transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_build( - project_id='project_id_value', - build=cloudbuild.Build(id='id_value'), + project_id="project_id_value", build=cloudbuild.Build(id="id_value"), ) # Establish that the underlying call was made with the expected @@ -387,47 +433,42 @@ def test_create_build_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].build == cloudbuild.Build(id='id_value') + assert args[0].build == cloudbuild.Build(id="id_value") def test_create_build_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_build( cloudbuild.CreateBuildRequest(), - project_id='project_id_value', - build=cloudbuild.Build(id='id_value'), + project_id="project_id_value", + build=cloudbuild.Build(id="id_value"), ) @pytest.mark.asyncio async def test_create_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build), - '__call__') as call: + type(client._client._transport.create_build), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
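# These tests pin down the two calling conventions: pass a request object
# OR flattened fields, never both (mixing them raises ValueError, as the
# flattened_error tests show). Sketch, with placeholder values:
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild as _cb

_request = _cb.CreateBuildRequest(project_id="project_id_value")
# client.create_build(request=_request)                                  # OK
# client.create_build(project_id="p", build=_cb.Build(id="i"))           # OK
# client.create_build(_request, project_id="p")  # ValueError: pick one style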
response = await client.create_build( - project_id='project_id_value', - build=cloudbuild.Build(id='id_value'), + project_id="project_id_value", build=cloudbuild.Build(id="id_value"), ) # Establish that the underlying call was made with the expected @@ -435,31 +476,28 @@ async def test_create_build_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].build == cloudbuild.Build(id='id_value') + assert args[0].build == cloudbuild.Build(id="id_value") @pytest.mark.asyncio async def test_create_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_build( cloudbuild.CreateBuildRequest(), - project_id='project_id_value', - build=cloudbuild.Build(id='id_value'), + project_id="project_id_value", + build=cloudbuild.Build(id="id_value"), ) -def test_get_build(transport: str = 'grpc', request_type=cloudbuild.GetBuildRequest): +def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequest): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -467,29 +505,18 @@ def test_get_build(transport: str = 'grpc', request_type=cloudbuild.GetBuildRequ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_build), - '__call__') as call: + with mock.patch.object(type(client._transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build( - id='id_value', - - project_id='project_id_value', - + id="id_value", + project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, - - status_detail='status_detail_value', - - images=['images_value'], - - logs_bucket='logs_bucket_value', - - build_trigger_id='build_trigger_id_value', - - log_url='log_url_value', - - tags=['tags_value'], - + status_detail="status_detail_value", + images=["images_value"], + logs_bucket="logs_bucket_value", + build_trigger_id="build_trigger_id_value", + log_url="log_url_value", + tags=["tags_value"], ) response = client.get_build(request) @@ -503,23 +530,23 @@ def test_get_build(transport: str = 'grpc', request_type=cloudbuild.GetBuildRequ # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.Build) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" assert response.status == cloudbuild.Build.Status.QUEUED - assert response.status_detail == 'status_detail_value' + assert response.status_detail == "status_detail_value" - assert response.images == ['images_value'] + assert response.images == ["images_value"] - assert response.logs_bucket == 'logs_bucket_value' + assert response.logs_bucket == "logs_bucket_value" - assert response.build_trigger_id == 'build_trigger_id_value' + assert response.build_trigger_id == "build_trigger_id_value" - assert response.log_url == 'log_url_value' + assert response.log_url == "log_url_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] def test_get_build_from_dict(): @@ -527,10 +554,9 @@ def test_get_build_from_dict(): @pytest.mark.asyncio -async def test_get_build_async(transport: str = 'grpc_asyncio'): +async def test_get_build_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -539,20 +565,22 @@ async def test_get_build_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build), - '__call__') as call: + type(client._client._transport.get_build), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.QUEUED, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.Build( + id="id_value", + project_id="project_id_value", + status=cloudbuild.Build.Status.QUEUED, + status_detail="status_detail_value", + images=["images_value"], + logs_bucket="logs_bucket_value", + build_trigger_id="build_trigger_id_value", + log_url="log_url_value", + tags=["tags_value"], + ) + ) response = await client.get_build(request) @@ -565,42 +593,37 @@ async def test_get_build_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.Build) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" assert response.status == cloudbuild.Build.Status.QUEUED - assert response.status_detail == 'status_detail_value' + assert response.status_detail == "status_detail_value" - assert response.images == ['images_value'] + assert response.images == ["images_value"] - assert response.logs_bucket == 'logs_bucket_value' + assert response.logs_bucket == "logs_bucket_value" - assert response.build_trigger_id == 'build_trigger_id_value' + assert response.build_trigger_id == "build_trigger_id_value" - assert response.log_url == 'log_url_value' + assert response.log_url == "log_url_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] def test_get_build_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_build), - '__call__') as call: + with mock.patch.object(type(client._transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_build( - project_id='project_id_value', - id='id_value', + project_id="project_id_value", id="id_value", ) # Establish that the underlying call was made with the expected @@ -608,77 +631,65 @@ def test_get_build_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].id == 'id_value' + assert args[0].id == "id_value" def test_get_build_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_build( - cloudbuild.GetBuildRequest(), - project_id='project_id_value', - id='id_value', + cloudbuild.GetBuildRequest(), project_id="project_id_value", id="id_value", ) @pytest.mark.asyncio async def test_get_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build), - '__call__') as call: + type(client._client._transport.get_build), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_build( - project_id='project_id_value', - id='id_value', - ) + response = await client.get_build(project_id="project_id_value", id="id_value",) # Establish that the underlying call was made with the expected # request object values. 
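# (Aside: how the request-object assertions below work. unittest.mock records
# every invocation in mock_calls as a (name, args, kwargs) triple, so args[0]
# is the request proto the client handed to the transport. A toy example:)
from unittest import mock

fake_rpc = mock.Mock()
fake_rpc("the-request", metadata=())
name, args, kwargs = fake_rpc.mock_calls[0]
assert args[0] == "the-request"
assert kwargs["metadata"] == ()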
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].id == 'id_value' + assert args[0].id == "id_value" @pytest.mark.asyncio async def test_get_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_build( - cloudbuild.GetBuildRequest(), - project_id='project_id_value', - id='id_value', + cloudbuild.GetBuildRequest(), project_id="project_id_value", id="id_value", ) -def test_list_builds(transport: str = 'grpc', request_type=cloudbuild.ListBuildsRequest): +def test_list_builds( + transport: str = "grpc", request_type=cloudbuild.ListBuildsRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -686,13 +697,10 @@ def test_list_builds(transport: str = 'grpc', request_type=cloudbuild.ListBuilds request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_builds), - '__call__') as call: + with mock.patch.object(type(client._transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse( - next_page_token='next_page_token_value', - + next_page_token="next_page_token_value", ) response = client.list_builds(request) @@ -706,7 +714,7 @@ def test_list_builds(transport: str = 'grpc', request_type=cloudbuild.ListBuilds # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_builds_from_dict(): @@ -714,10 +722,9 @@ def test_list_builds_from_dict(): @pytest.mark.asyncio -async def test_list_builds_async(transport: str = 'grpc_asyncio'): +async def test_list_builds_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -726,12 +733,12 @@ async def test_list_builds_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - '__call__') as call: + type(client._client._transport.list_builds), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.ListBuildsResponse(next_page_token="next_page_token_value",) + ) response = await client.list_builds(request) @@ -744,26 +751,21 @@ async def test_list_builds_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. 
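# (Aside on the type check below: list_builds does not return the raw
# ListBuildsResponse; it wraps it in a pager that proxies fields such as
# next_page_token to the current page while fetching later pages lazily.
# A rough sketch of that proxying, using a hypothetical class that is not
# the pagers implementation:)
class _PagerSketch:
    def __init__(self, first_response):
        self._response = first_response

    def __getattr__(self, name):
        # Delegate unknown attributes (e.g. next_page_token) to the page.
        return getattr(self._response, name)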
assert isinstance(response, pagers.ListBuildsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_builds_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_builds), - '__call__') as call: + with mock.patch.object(type(client._transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_builds( - project_id='project_id_value', - filter='filter_value', + project_id="project_id_value", filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -771,45 +773,42 @@ def test_list_builds_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].filter == 'filter_value' + assert args[0].filter == "filter_value" def test_list_builds_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_builds( cloudbuild.ListBuildsRequest(), - project_id='project_id_value', - filter='filter_value', + project_id="project_id_value", + filter="filter_value", ) @pytest.mark.asyncio async def test_list_builds_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - '__call__') as call: + type(client._client._transport.list_builds), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.ListBuildsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_builds( - project_id='project_id_value', - filter='filter_value', + project_id="project_id_value", filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -817,61 +816,42 @@ async def test_list_builds_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].filter == 'filter_value' + assert args[0].filter == "filter_value" @pytest.mark.asyncio async def test_list_builds_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_builds( cloudbuild.ListBuildsRequest(), - project_id='project_id_value', - filter='filter_value', + project_id="project_id_value", + filter="filter_value", ) def test_list_builds_pager(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials, - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_builds), - '__call__') as call: + with mock.patch.object(type(client._transport.list_builds), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', + builds=[cloudbuild.Build(), cloudbuild.Build(), cloudbuild.Build(),], + next_page_token="abc", ), + cloudbuild.ListBuildsResponse(builds=[], next_page_token="def",), cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', + builds=[cloudbuild.Build(),], next_page_token="ghi", ), cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], + builds=[cloudbuild.Build(), cloudbuild.Build(),], ), RuntimeError, ) @@ -883,147 +863,106 @@ def test_list_builds_pager(): results = [i for i in pager] assert len(results) == 6 - assert all(isinstance(i, cloudbuild.Build) - for i in results) + assert all(isinstance(i, cloudbuild.Build) for i in results) + def test_list_builds_pages(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials, - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_builds), - '__call__') as call: + with mock.patch.object(type(client._transport.list_builds), "__call__") as call: # Set the response to a series of pages. 
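# (Aside: what the scripted side_effect below does, shown with a bare Mock.
# Each call to the mocked RPC returns the next element, so the pager sees the
# pages in order; the trailing RuntimeError fails the test loudly if the
# pager ever requests a page beyond those scripted here.)
from unittest import mock

scripted = mock.Mock(side_effect=["page-1", "page-2", RuntimeError])
assert scripted() == "page-1"
assert scripted() == "page-2"
# A third call would raise RuntimeError instead of returning data.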
call.side_effect = ( cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', + builds=[cloudbuild.Build(), cloudbuild.Build(), cloudbuild.Build(),], + next_page_token="abc", ), + cloudbuild.ListBuildsResponse(builds=[], next_page_token="def",), cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', + builds=[cloudbuild.Build(),], next_page_token="ghi", ), cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], + builds=[cloudbuild.Build(), cloudbuild.Build(),], ), RuntimeError, ) pages = list(client.list_builds(request={}).pages) - for page, token in zip(pages, ['abc','def','ghi', '']): + for page, token in zip(pages, ["abc", "def", "ghi", ""]): assert page.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_builds_async_pager(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - '__call__', new_callable=mock.AsyncMock) as call: + type(client._client._transport.list_builds), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', - ), - cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', + builds=[cloudbuild.Build(), cloudbuild.Build(), cloudbuild.Build(),], + next_page_token="abc", ), + cloudbuild.ListBuildsResponse(builds=[], next_page_token="def",), cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', + builds=[cloudbuild.Build(),], next_page_token="ghi", ), cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], + builds=[cloudbuild.Build(), cloudbuild.Build(),], ), RuntimeError, ) async_pager = await client.list_builds(request={},) - assert async_pager.next_page_token == 'abc' + assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.Build) - for i in responses) + assert all(isinstance(i, cloudbuild.Build) for i in responses) + @pytest.mark.asyncio async def test_list_builds_async_pages(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - '__call__', new_callable=mock.AsyncMock) as call: + type(client._client._transport.list_builds), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
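# (Aside: the async paging tests rely on mock.AsyncMock, available on Python
# 3.8+, so the patched stub can still be awaited. A self-contained toy of the
# awaited, scripted-responses pattern:)
import asyncio
from unittest import mock

async def _demo():
    rpc = mock.AsyncMock(side_effect=["page-1", "page-2"])
    assert [await rpc(), await rpc()] == ["page-1", "page-2"]

asyncio.run(_demo())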
call.side_effect = ( cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - cloudbuild.Build(), - ], - next_page_token='abc', + builds=[cloudbuild.Build(), cloudbuild.Build(), cloudbuild.Build(),], + next_page_token="abc", ), + cloudbuild.ListBuildsResponse(builds=[], next_page_token="def",), cloudbuild.ListBuildsResponse( - builds=[], - next_page_token='def', + builds=[cloudbuild.Build(),], next_page_token="ghi", ), cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildsResponse( - builds=[ - cloudbuild.Build(), - cloudbuild.Build(), - ], + builds=[cloudbuild.Build(), cloudbuild.Build(),], ), RuntimeError, ) pages = [] async for page in (await client.list_builds(request={})).pages: pages.append(page) - for page, token in zip(pages, ['abc','def','ghi', '']): + for page, token in zip(pages, ["abc", "def", "ghi", ""]): assert page.raw_page.next_page_token == token -def test_cancel_build(transport: str = 'grpc', request_type=cloudbuild.CancelBuildRequest): +def test_cancel_build( + transport: str = "grpc", request_type=cloudbuild.CancelBuildRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1031,29 +970,18 @@ def test_cancel_build(transport: str = 'grpc', request_type=cloudbuild.CancelBui request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.cancel_build), - '__call__') as call: + with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build( - id='id_value', - - project_id='project_id_value', - + id="id_value", + project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, - - status_detail='status_detail_value', - - images=['images_value'], - - logs_bucket='logs_bucket_value', - - build_trigger_id='build_trigger_id_value', - - log_url='log_url_value', - - tags=['tags_value'], - + status_detail="status_detail_value", + images=["images_value"], + logs_bucket="logs_bucket_value", + build_trigger_id="build_trigger_id_value", + log_url="log_url_value", + tags=["tags_value"], ) response = client.cancel_build(request) @@ -1067,23 +995,23 @@ def test_cancel_build(transport: str = 'grpc', request_type=cloudbuild.CancelBui # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.Build) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" assert response.status == cloudbuild.Build.Status.QUEUED - assert response.status_detail == 'status_detail_value' + assert response.status_detail == "status_detail_value" - assert response.images == ['images_value'] + assert response.images == ["images_value"] - assert response.logs_bucket == 'logs_bucket_value' + assert response.logs_bucket == "logs_bucket_value" - assert response.build_trigger_id == 'build_trigger_id_value' + assert response.build_trigger_id == "build_trigger_id_value" - assert response.log_url == 'log_url_value' + assert response.log_url == "log_url_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] def test_cancel_build_from_dict(): @@ -1091,10 +1019,9 @@ def test_cancel_build_from_dict(): @pytest.mark.asyncio -async def test_cancel_build_async(transport: str = 'grpc_asyncio'): +async def test_cancel_build_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1103,20 +1030,22 @@ async def test_cancel_build_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.cancel_build), - '__call__') as call: + type(client._client._transport.cancel_build), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.Build( - id='id_value', - project_id='project_id_value', - status=cloudbuild.Build.Status.QUEUED, - status_detail='status_detail_value', - images=['images_value'], - logs_bucket='logs_bucket_value', - build_trigger_id='build_trigger_id_value', - log_url='log_url_value', - tags=['tags_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.Build( + id="id_value", + project_id="project_id_value", + status=cloudbuild.Build.Status.QUEUED, + status_detail="status_detail_value", + images=["images_value"], + logs_bucket="logs_bucket_value", + build_trigger_id="build_trigger_id_value", + log_url="log_url_value", + tags=["tags_value"], + ) + ) response = await client.cancel_build(request) @@ -1129,42 +1058,37 @@ async def test_cancel_build_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.Build) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" assert response.status == cloudbuild.Build.Status.QUEUED - assert response.status_detail == 'status_detail_value' + assert response.status_detail == "status_detail_value" - assert response.images == ['images_value'] + assert response.images == ["images_value"] - assert response.logs_bucket == 'logs_bucket_value' + assert response.logs_bucket == "logs_bucket_value" - assert response.build_trigger_id == 'build_trigger_id_value' + assert response.build_trigger_id == "build_trigger_id_value" - assert response.log_url == 'log_url_value' + assert response.log_url == "log_url_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] def test_cancel_build_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.cancel_build), - '__call__') as call: + with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.cancel_build( - project_id='project_id_value', - id='id_value', + project_id="project_id_value", id="id_value", ) # Establish that the underlying call was made with the expected @@ -1172,36 +1096,32 @@ def test_cancel_build_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].id == 'id_value' + assert args[0].id == "id_value" def test_cancel_build_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.cancel_build( cloudbuild.CancelBuildRequest(), - project_id='project_id_value', - id='id_value', + project_id="project_id_value", + id="id_value", ) @pytest.mark.asyncio async def test_cancel_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.cancel_build), - '__call__') as call: + type(client._client._transport.cancel_build), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -1209,8 +1129,7 @@ async def test_cancel_build_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.cancel_build( - project_id='project_id_value', - id='id_value', + project_id="project_id_value", id="id_value", ) # Establish that the underlying call was made with the expected @@ -1218,31 +1137,30 @@ async def test_cancel_build_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].id == 'id_value' + assert args[0].id == "id_value" @pytest.mark.asyncio async def test_cancel_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.cancel_build( cloudbuild.CancelBuildRequest(), - project_id='project_id_value', - id='id_value', + project_id="project_id_value", + id="id_value", ) -def test_retry_build(transport: str = 'grpc', request_type=cloudbuild.RetryBuildRequest): +def test_retry_build( + transport: str = "grpc", request_type=cloudbuild.RetryBuildRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1250,11 +1168,9 @@ def test_retry_build(transport: str = 'grpc', request_type=cloudbuild.RetryBuild request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.retry_build), - '__call__') as call: + with mock.patch.object(type(client._transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.retry_build(request) @@ -1273,10 +1189,9 @@ def test_retry_build_from_dict(): @pytest.mark.asyncio -async def test_retry_build_async(transport: str = 'grpc_asyncio'): +async def test_retry_build_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1285,11 +1200,11 @@ async def test_retry_build_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.retry_build), - '__call__') as call: + type(client._client._transport.retry_build), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.retry_build(request) @@ -1305,22 +1220,17 @@ async def test_retry_build_async(transport: str = 'grpc_asyncio'): def test_retry_build_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
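# (Aside: why these tests patch type(...).__call__ rather than replacing the
# transport attribute. Each RPC is exposed as a callable stub object; Python
# looks __call__ up on the type, so patching it there intercepts the
# invocation while the stub attribute itself stays in place. A minimal,
# self-contained illustration of the same trick:)
from unittest import mock

class _Stub:
    def __call__(self):
        return "real result"

stub = _Stub()
with mock.patch.object(type(stub), "__call__") as patched:
    patched.return_value = "faked result"
    assert stub() == "faked result"  # routed through the patched __call__
assert stub() == "real result"  # the patch is undone on context exit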
- with mock.patch.object( - type(client._transport.retry_build), - '__call__') as call: + with mock.patch.object(type(client._transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.retry_build( - project_id='project_id_value', - id='id_value', + project_id="project_id_value", id="id_value", ) # Establish that the underlying call was made with the expected @@ -1328,47 +1238,42 @@ def test_retry_build_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].id == 'id_value' + assert args[0].id == "id_value" def test_retry_build_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.retry_build( cloudbuild.RetryBuildRequest(), - project_id='project_id_value', - id='id_value', + project_id="project_id_value", + id="id_value", ) @pytest.mark.asyncio async def test_retry_build_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.retry_build), - '__call__') as call: + type(client._client._transport.retry_build), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.retry_build( - project_id='project_id_value', - id='id_value', + project_id="project_id_value", id="id_value", ) # Establish that the underlying call was made with the expected @@ -1376,31 +1281,30 @@ async def test_retry_build_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].id == 'id_value' + assert args[0].id == "id_value" @pytest.mark.asyncio async def test_retry_build_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
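# (Aside: the guard the *_flattened_error tests exercise, sketched here in
# simplified form rather than as the generated code: the clients reject
# mixing a request object with flattened field arguments so the two calling
# styles cannot silently conflict.)
def _reject_mixed_args(request, **flattened_fields):
    # Simplified stand-in for the client-side check.
    if request is not None and any(flattened_fields.values()):
        raise ValueError("request and flattened fields are mutually exclusive")

_reject_mixed_args(None, project_id="p", id="i")  # flattened only: accepted
try:
    _reject_mixed_args(object(), project_id="p")  # mixed: rejected
except ValueError:
    pass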
with pytest.raises(ValueError): await client.retry_build( cloudbuild.RetryBuildRequest(), - project_id='project_id_value', - id='id_value', + project_id="project_id_value", + id="id_value", ) -def test_create_build_trigger(transport: str = 'grpc', request_type=cloudbuild.CreateBuildTriggerRequest): +def test_create_build_trigger( + transport: str = "grpc", request_type=cloudbuild.CreateBuildTriggerRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1409,25 +1313,18 @@ def test_create_build_trigger(transport: str = 'grpc', request_type=cloudbuild.C # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_build_trigger), - '__call__') as call: + type(client._transport.create_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( - id='id_value', - - description='description_value', - - name='name_value', - - tags=['tags_value'], - + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], disabled=True, - - ignored_files=['ignored_files_value'], - - included_files=['included_files_value'], - - build=cloudbuild.Build(id='id_value'), + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + build=cloudbuild.Build(id="id_value"), ) response = client.create_build_trigger(request) @@ -1441,19 +1338,19 @@ def test_create_build_trigger(transport: str = 'grpc', request_type=cloudbuild.C # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.description == 'description_value' + assert response.description == "description_value" - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] + assert response.ignored_files == ["ignored_files_value"] - assert response.included_files == ['included_files_value'] + assert response.included_files == ["included_files_value"] def test_create_build_trigger_from_dict(): @@ -1461,10 +1358,9 @@ def test_create_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_create_build_trigger_async(transport: str = 'grpc_asyncio'): +async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1473,18 +1369,20 @@ async def test_create_build_trigger_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build_trigger), - '__call__') as call: + type(client._client._transport.create_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. 
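# (Aside: the async tests wrap designated return values in FakeUnaryUnaryCall
# because the async client awaits the stub, so the value must be awaitable.
# A toy wrapper of the same shape, hypothetical and not the api_core class:)
import asyncio

class _FakeCall:
    def __init__(self, response):
        self._response = response

    def __await__(self):
        if False:  # generator trick: an awaitable that yields nothing
            yield
        return self._response

async def _use():
    return await _FakeCall("done")

assert asyncio.run(_use()) == "done"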
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.BuildTrigger( + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], + disabled=True, + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + ) + ) response = await client.create_build_trigger(request) @@ -1497,38 +1395,36 @@ async def test_create_build_trigger_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.description == 'description_value' + assert response.description == "description_value" - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] + assert response.ignored_files == ["ignored_files_value"] - assert response.included_files == ['included_files_value'] + assert response.included_files == ["included_files_value"] def test_create_build_trigger_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_build_trigger), - '__call__') as call: + type(client._transport.create_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_build_trigger( - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(id='id_value'), + project_id="project_id_value", + trigger=cloudbuild.BuildTrigger(id="id_value"), ) # Establish that the underlying call was made with the expected @@ -1536,45 +1432,43 @@ def test_create_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger == cloudbuild.BuildTrigger(id='id_value') + assert args[0].trigger == cloudbuild.BuildTrigger(id="id_value") def test_create_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.create_build_trigger( cloudbuild.CreateBuildTriggerRequest(), - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(id='id_value'), + project_id="project_id_value", + trigger=cloudbuild.BuildTrigger(id="id_value"), ) @pytest.mark.asyncio async def test_create_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build_trigger), - '__call__') as call: + type(client._client._transport.create_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.BuildTrigger() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_build_trigger( - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(id='id_value'), + project_id="project_id_value", + trigger=cloudbuild.BuildTrigger(id="id_value"), ) # Establish that the underlying call was made with the expected @@ -1582,31 +1476,30 @@ async def test_create_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger == cloudbuild.BuildTrigger(id='id_value') + assert args[0].trigger == cloudbuild.BuildTrigger(id="id_value") @pytest.mark.asyncio async def test_create_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_build_trigger( cloudbuild.CreateBuildTriggerRequest(), - project_id='project_id_value', - trigger=cloudbuild.BuildTrigger(id='id_value'), + project_id="project_id_value", + trigger=cloudbuild.BuildTrigger(id="id_value"), ) -def test_get_build_trigger(transport: str = 'grpc', request_type=cloudbuild.GetBuildTriggerRequest): +def test_get_build_trigger( + transport: str = "grpc", request_type=cloudbuild.GetBuildTriggerRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1615,25 +1508,18 @@ def test_get_build_trigger(transport: str = 'grpc', request_type=cloudbuild.GetB # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_build_trigger), - '__call__') as call: + type(client._transport.get_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = cloudbuild.BuildTrigger( - id='id_value', - - description='description_value', - - name='name_value', - - tags=['tags_value'], - + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], disabled=True, - - ignored_files=['ignored_files_value'], - - included_files=['included_files_value'], - - build=cloudbuild.Build(id='id_value'), + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + build=cloudbuild.Build(id="id_value"), ) response = client.get_build_trigger(request) @@ -1647,19 +1533,19 @@ def test_get_build_trigger(transport: str = 'grpc', request_type=cloudbuild.GetB # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.description == 'description_value' + assert response.description == "description_value" - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] + assert response.ignored_files == ["ignored_files_value"] - assert response.included_files == ['included_files_value'] + assert response.included_files == ["included_files_value"] def test_get_build_trigger_from_dict(): @@ -1667,10 +1553,9 @@ def test_get_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_get_build_trigger_async(transport: str = 'grpc_asyncio'): +async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1679,18 +1564,20 @@ async def test_get_build_trigger_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build_trigger), - '__call__') as call: + type(client._client._transport.get_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.BuildTrigger( + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], + disabled=True, + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + ) + ) response = await client.get_build_trigger(request) @@ -1703,38 +1590,35 @@ async def test_get_build_trigger_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. 
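# (Aside on these equality asserts: proto-plus messages compare by field
# value, so constructing a fresh message with the same fields matches the one
# the mock returned; the same property backs checks such as
# args[0].trigger == cloudbuild.BuildTrigger(id="id_value").)
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild

assert cloudbuild.BuildTrigger(id="id_value") == cloudbuild.BuildTrigger(id="id_value")
assert cloudbuild.BuildTrigger(id="id_value") != cloudbuild.BuildTrigger(id="other")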
assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.description == 'description_value' + assert response.description == "description_value" - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] + assert response.ignored_files == ["ignored_files_value"] - assert response.included_files == ['included_files_value'] + assert response.included_files == ["included_files_value"] def test_get_build_trigger_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_build_trigger), - '__call__') as call: + type(client._transport.get_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', + project_id="project_id_value", trigger_id="trigger_id_value", ) # Establish that the underlying call was made with the expected @@ -1742,45 +1626,42 @@ def test_get_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger_id == 'trigger_id_value' + assert args[0].trigger_id == "trigger_id_value" def test_get_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_build_trigger( cloudbuild.GetBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', + project_id="project_id_value", + trigger_id="trigger_id_value", ) @pytest.mark.asyncio async def test_get_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build_trigger), - '__call__') as call: + type(client._client._transport.get_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.BuildTrigger() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', + project_id="project_id_value", trigger_id="trigger_id_value", ) # Establish that the underlying call was made with the expected @@ -1788,31 +1669,30 @@ async def test_get_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger_id == 'trigger_id_value' + assert args[0].trigger_id == "trigger_id_value" @pytest.mark.asyncio async def test_get_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_build_trigger( cloudbuild.GetBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', + project_id="project_id_value", + trigger_id="trigger_id_value", ) -def test_list_build_triggers(transport: str = 'grpc', request_type=cloudbuild.ListBuildTriggersRequest): +def test_list_build_triggers( + transport: str = "grpc", request_type=cloudbuild.ListBuildTriggersRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1821,12 +1701,11 @@ def test_list_build_triggers(transport: str = 'grpc', request_type=cloudbuild.Li # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), - '__call__') as call: + type(client._transport.list_build_triggers), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse( - next_page_token='next_page_token_value', - + next_page_token="next_page_token_value", ) response = client.list_build_triggers(request) @@ -1840,7 +1719,7 @@ def test_list_build_triggers(transport: str = 'grpc', request_type=cloudbuild.Li # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildTriggersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_build_triggers_from_dict(): @@ -1848,10 +1727,9 @@ def test_list_build_triggers_from_dict(): @pytest.mark.asyncio -async def test_list_build_triggers_async(transport: str = 'grpc_asyncio'): +async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1860,12 +1738,14 @@ async def test_list_build_triggers_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), - '__call__') as call: + type(client._client._transport.list_build_triggers), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.ListBuildTriggersResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_build_triggers(request) @@ -1878,101 +1758,87 @@ async def test_list_build_triggers_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_build_triggers_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), - '__call__') as call: + type(client._transport.list_build_triggers), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_build_triggers( - project_id='project_id_value', - ) + client.list_build_triggers(project_id="project_id_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" def test_list_build_triggers_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), - project_id='project_id_value', + cloudbuild.ListBuildTriggersRequest(), project_id="project_id_value", ) @pytest.mark.asyncio async def test_list_build_triggers_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), - '__call__') as call: + type(client._client._transport.list_build_triggers), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListBuildTriggersResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.ListBuildTriggersResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_build_triggers( - project_id='project_id_value', - ) + response = await client.list_build_triggers(project_id="project_id_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" @pytest.mark.asyncio async def test_list_build_triggers_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_build_triggers( - cloudbuild.ListBuildTriggersRequest(), - project_id='project_id_value', + cloudbuild.ListBuildTriggersRequest(), project_id="project_id_value", ) def test_list_build_triggers_pager(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials, - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), - '__call__') as call: + type(client._transport.list_build_triggers), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildTriggersResponse( @@ -1981,23 +1847,14 @@ def test_list_build_triggers_pager(): cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(), ], - next_page_token='abc', + next_page_token="abc", ), + cloudbuild.ListBuildTriggersResponse(triggers=[], next_page_token="def",), cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', + triggers=[cloudbuild.BuildTrigger(),], next_page_token="ghi", ), cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], + triggers=[cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(),], ), RuntimeError, ) @@ -2009,18 +1866,16 @@ def test_list_build_triggers_pager(): results = [i for i in pager] assert len(results) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) - for i in results) + assert all(isinstance(i, cloudbuild.BuildTrigger) for i in results) + def test_list_build_triggers_pages(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials, - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), - '__call__') as call: + type(client._transport.list_build_triggers), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( cloudbuild.ListBuildTriggersResponse( @@ -2029,40 +1884,32 @@ def test_list_build_triggers_pages(): cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(), ], - next_page_token='abc', + next_page_token="abc", ), + cloudbuild.ListBuildTriggersResponse(triggers=[], next_page_token="def",), cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', + triggers=[cloudbuild.BuildTrigger(),], next_page_token="ghi", ), cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], + triggers=[cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(),], ), RuntimeError, ) pages = list(client.list_build_triggers(request={}).pages) - for page, token in zip(pages, ['abc','def','ghi', '']): + for page, token in zip(pages, ["abc", "def", "ghi", ""]): assert page.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_build_triggers_async_pager(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client._client._transport.list_build_triggers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildTriggersResponse( @@ -2071,46 +1918,37 @@ async def test_list_build_triggers_async_pager(): cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(), ], - next_page_token='abc', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', + next_page_token="abc", ), + cloudbuild.ListBuildTriggersResponse(triggers=[], next_page_token="def",), cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', + triggers=[cloudbuild.BuildTrigger(),], next_page_token="ghi", ), cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], + triggers=[cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(),], ), RuntimeError, ) async_pager = await client.list_build_triggers(request={},) - assert async_pager.next_page_token == 'abc' + assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloudbuild.BuildTrigger) - for i in responses) + assert all(isinstance(i, cloudbuild.BuildTrigger) for i in responses) + @pytest.mark.asyncio async def test_list_build_triggers_async_pages(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials, - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client._client._transport.list_build_triggers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( cloudbuild.ListBuildTriggersResponse( @@ -2119,37 +1957,29 @@ async def test_list_build_triggers_async_pages(): cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(), ], - next_page_token='abc', + next_page_token="abc", ), + cloudbuild.ListBuildTriggersResponse(triggers=[], next_page_token="def",), cloudbuild.ListBuildTriggersResponse( - triggers=[], - next_page_token='def', - ), - cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - ], - next_page_token='ghi', + triggers=[cloudbuild.BuildTrigger(),], next_page_token="ghi", ), cloudbuild.ListBuildTriggersResponse( - triggers=[ - cloudbuild.BuildTrigger(), - cloudbuild.BuildTrigger(), - ], + triggers=[cloudbuild.BuildTrigger(), cloudbuild.BuildTrigger(),], ), RuntimeError, ) pages = [] async for page in (await client.list_build_triggers(request={})).pages: pages.append(page) - for page, token in zip(pages, ['abc','def','ghi', '']): + for page, token in zip(pages, ["abc", "def", "ghi", ""]): assert page.raw_page.next_page_token == token -def test_delete_build_trigger(transport: str = 'grpc', request_type=cloudbuild.DeleteBuildTriggerRequest): +def test_delete_build_trigger( + transport: str = "grpc", request_type=cloudbuild.DeleteBuildTriggerRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2158,8 +1988,8 @@ def test_delete_build_trigger(transport: str = 'grpc', request_type=cloudbuild.D # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_build_trigger), - '__call__') as call: + type(client._transport.delete_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2180,10 +2010,9 @@ def test_delete_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_delete_build_trigger_async(transport: str = 'grpc_asyncio'): +async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2192,8 +2021,8 @@ async def test_delete_build_trigger_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_build_trigger), - '__call__') as call: + type(client._client._transport.delete_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2210,22 +2039,19 @@ async def test_delete_build_trigger_async(transport: str = 'grpc_asyncio'): def test_delete_build_trigger_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_build_trigger), - '__call__') as call: + type(client._transport.delete_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', + project_id="project_id_value", trigger_id="trigger_id_value", ) # Establish that the underlying call was made with the expected @@ -2233,36 +2059,32 @@ def test_delete_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger_id == 'trigger_id_value' + assert args[0].trigger_id == "trigger_id_value" def test_delete_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_build_trigger( cloudbuild.DeleteBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', + project_id="project_id_value", + trigger_id="trigger_id_value", ) @pytest.mark.asyncio async def test_delete_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_build_trigger), - '__call__') as call: + type(client._client._transport.delete_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2270,8 +2092,7 @@ async def test_delete_build_trigger_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', + project_id="project_id_value", trigger_id="trigger_id_value", ) # Establish that the underlying call was made with the expected @@ -2279,31 +2100,30 @@ async def test_delete_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger_id == 'trigger_id_value' + assert args[0].trigger_id == "trigger_id_value" @pytest.mark.asyncio async def test_delete_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.delete_build_trigger( cloudbuild.DeleteBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', + project_id="project_id_value", + trigger_id="trigger_id_value", ) -def test_update_build_trigger(transport: str = 'grpc', request_type=cloudbuild.UpdateBuildTriggerRequest): +def test_update_build_trigger( + transport: str = "grpc", request_type=cloudbuild.UpdateBuildTriggerRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2312,25 +2132,18 @@ def test_update_build_trigger(transport: str = 'grpc', request_type=cloudbuild.U # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_build_trigger), - '__call__') as call: + type(client._transport.update_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( - id='id_value', - - description='description_value', - - name='name_value', - - tags=['tags_value'], - + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], disabled=True, - - ignored_files=['ignored_files_value'], - - included_files=['included_files_value'], - - build=cloudbuild.Build(id='id_value'), + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + build=cloudbuild.Build(id="id_value"), ) response = client.update_build_trigger(request) @@ -2344,19 +2157,19 @@ def test_update_build_trigger(transport: str = 'grpc', request_type=cloudbuild.U # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.description == 'description_value' + assert response.description == "description_value" - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] + assert response.ignored_files == ["ignored_files_value"] - assert response.included_files == ['included_files_value'] + assert response.included_files == ["included_files_value"] def test_update_build_trigger_from_dict(): @@ -2364,10 +2177,9 @@ def test_update_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_update_build_trigger_async(transport: str = 'grpc_asyncio'): +async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2376,18 +2188,20 @@ async def test_update_build_trigger_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_build_trigger), - '__call__') as call: + type(client._client._transport.update_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger( - id='id_value', - description='description_value', - name='name_value', - tags=['tags_value'], - disabled=True, - ignored_files=['ignored_files_value'], - included_files=['included_files_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.BuildTrigger( + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], + disabled=True, + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + ) + ) response = await client.update_build_trigger(request) @@ -2400,39 +2214,37 @@ async def test_update_build_trigger_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) - assert response.id == 'id_value' + assert response.id == "id_value" - assert response.description == 'description_value' + assert response.description == "description_value" - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.tags == ['tags_value'] + assert response.tags == ["tags_value"] assert response.disabled is True - assert response.ignored_files == ['ignored_files_value'] + assert response.ignored_files == ["ignored_files_value"] - assert response.included_files == ['included_files_value'] + assert response.included_files == ["included_files_value"] def test_update_build_trigger_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_build_trigger), - '__call__') as call: + type(client._transport.update_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(id='id_value'), + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=cloudbuild.BuildTrigger(id="id_value"), ) # Establish that the underlying call was made with the expected @@ -2440,49 +2252,47 @@ def test_update_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger_id == 'trigger_id_value' + assert args[0].trigger_id == "trigger_id_value" - assert args[0].trigger == cloudbuild.BuildTrigger(id='id_value') + assert args[0].trigger == cloudbuild.BuildTrigger(id="id_value") def test_update_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.update_build_trigger( cloudbuild.UpdateBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(id='id_value'), + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=cloudbuild.BuildTrigger(id="id_value"), ) @pytest.mark.asyncio async def test_update_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_build_trigger), - '__call__') as call: + type(client._client._transport.update_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.BuildTrigger()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.BuildTrigger() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(id='id_value'), + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=cloudbuild.BuildTrigger(id="id_value"), ) # Establish that the underlying call was made with the expected @@ -2490,34 +2300,33 @@ async def test_update_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger_id == 'trigger_id_value' + assert args[0].trigger_id == "trigger_id_value" - assert args[0].trigger == cloudbuild.BuildTrigger(id='id_value') + assert args[0].trigger == cloudbuild.BuildTrigger(id="id_value") @pytest.mark.asyncio async def test_update_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_build_trigger( cloudbuild.UpdateBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - trigger=cloudbuild.BuildTrigger(id='id_value'), + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=cloudbuild.BuildTrigger(id="id_value"), ) -def test_run_build_trigger(transport: str = 'grpc', request_type=cloudbuild.RunBuildTriggerRequest): +def test_run_build_trigger( + transport: str = "grpc", request_type=cloudbuild.RunBuildTriggerRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2526,10 +2335,10 @@ def test_run_build_trigger(transport: str = 'grpc', request_type=cloudbuild.RunB # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.run_build_trigger), - '__call__') as call: + type(client._transport.run_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.run_build_trigger(request) @@ -2548,10 +2357,9 @@ def test_run_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_run_build_trigger_async(transport: str = 'grpc_asyncio'): +async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2560,11 +2368,11 @@ async def test_run_build_trigger_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.run_build_trigger), - '__call__') as call: + type(client._client._transport.run_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.run_build_trigger(request) @@ -2580,23 +2388,21 @@ async def test_run_build_trigger_async(transport: str = 'grpc_asyncio'): def test_run_build_trigger_flattened(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.run_build_trigger), - '__call__') as call: + type(client._transport.run_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.run_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), + project_id="project_id_value", + trigger_id="trigger_id_value", + source=cloudbuild.RepoSource(project_id="project_id_value"), ) # Establish that the underlying call was made with the expected @@ -2604,51 +2410,47 @@ def test_run_build_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger_id == 'trigger_id_value' + assert args[0].trigger_id == "trigger_id_value" - assert args[0].source == cloudbuild.RepoSource(project_id='project_id_value') + assert args[0].source == cloudbuild.RepoSource(project_id="project_id_value") def test_run_build_trigger_flattened_error(): - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.run_build_trigger( cloudbuild.RunBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), + project_id="project_id_value", + trigger_id="trigger_id_value", + source=cloudbuild.RepoSource(project_id="project_id_value"), ) @pytest.mark.asyncio async def test_run_build_trigger_flattened_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.run_build_trigger), - '__call__') as call: + type(client._client._transport.run_build_trigger), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.run_build_trigger( - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), + project_id="project_id_value", + trigger_id="trigger_id_value", + source=cloudbuild.RepoSource(project_id="project_id_value"), ) # Establish that the underlying call was made with the expected @@ -2656,34 +2458,33 @@ async def test_run_build_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == 'project_id_value' + assert args[0].project_id == "project_id_value" - assert args[0].trigger_id == 'trigger_id_value' + assert args[0].trigger_id == "trigger_id_value" - assert args[0].source == cloudbuild.RepoSource(project_id='project_id_value') + assert args[0].source == cloudbuild.RepoSource(project_id="project_id_value") @pytest.mark.asyncio async def test_run_build_trigger_flattened_error_async(): - client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - ) + client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.run_build_trigger( cloudbuild.RunBuildTriggerRequest(), - project_id='project_id_value', - trigger_id='trigger_id_value', - source=cloudbuild.RepoSource(project_id='project_id_value'), + project_id="project_id_value", + trigger_id="trigger_id_value", + source=cloudbuild.RepoSource(project_id="project_id_value"), ) -def test_create_worker_pool(transport: str = 'grpc', request_type=cloudbuild.CreateWorkerPoolRequest): +def test_create_worker_pool( + transport: str = "grpc", request_type=cloudbuild.CreateWorkerPoolRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2692,22 +2493,16 @@ def test_create_worker_pool(transport: str = 'grpc', request_type=cloudbuild.Cre # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.create_worker_pool), - '__call__') as call: + type(client._transport.create_worker_pool), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( - name='name_value', - - project_id='project_id_value', - - service_account_email='service_account_email_value', - + name="name_value", + project_id="project_id_value", + service_account_email="service_account_email_value", worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - ) response = client.create_worker_pool(request) @@ -2721,11 +2516,11 @@ def test_create_worker_pool(transport: str = 'grpc', request_type=cloudbuild.Cre # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" - assert response.service_account_email == 'service_account_email_value' + assert response.service_account_email == "service_account_email_value" assert response.worker_count == 1314 @@ -2739,10 +2534,9 @@ def test_create_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_create_worker_pool_async(transport: str = 'grpc_asyncio'): +async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2751,17 +2545,19 @@ async def test_create_worker_pool_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_worker_pool), - '__call__') as call: + type(client._client._transport.create_worker_pool), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( - name='name_value', - project_id='project_id_value', - service_account_email='service_account_email_value', - worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.WorkerPool( + name="name_value", + project_id="project_id_value", + service_account_email="service_account_email_value", + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + ) + ) response = await client.create_worker_pool(request) @@ -2774,11 +2570,11 @@ async def test_create_worker_pool_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" - assert response.service_account_email == 'service_account_email_value' + assert response.service_account_email == "service_account_email_value" assert response.worker_count == 1314 @@ -2787,10 +2583,11 @@ async def test_create_worker_pool_async(transport: str = 'grpc_asyncio'): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_get_worker_pool(transport: str = 'grpc', request_type=cloudbuild.GetWorkerPoolRequest): +def test_get_worker_pool( + transport: str = "grpc", request_type=cloudbuild.GetWorkerPoolRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2798,23 +2595,15 @@ def test_get_worker_pool(transport: str = 'grpc', request_type=cloudbuild.GetWor request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_worker_pool), - '__call__') as call: + with mock.patch.object(type(client._transport.get_worker_pool), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( - name='name_value', - - project_id='project_id_value', - - service_account_email='service_account_email_value', - + name="name_value", + project_id="project_id_value", + service_account_email="service_account_email_value", worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - ) response = client.get_worker_pool(request) @@ -2828,11 +2617,11 @@ def test_get_worker_pool(transport: str = 'grpc', request_type=cloudbuild.GetWor # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" - assert response.service_account_email == 'service_account_email_value' + assert response.service_account_email == "service_account_email_value" assert response.worker_count == 1314 @@ -2846,10 +2635,9 @@ def test_get_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_get_worker_pool_async(transport: str = 'grpc_asyncio'): +async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2858,17 +2646,19 @@ async def test_get_worker_pool_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_worker_pool), - '__call__') as call: + type(client._client._transport.get_worker_pool), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( - name='name_value', - project_id='project_id_value', - service_account_email='service_account_email_value', - worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.WorkerPool( + name="name_value", + project_id="project_id_value", + service_account_email="service_account_email_value", + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + ) + ) response = await client.get_worker_pool(request) @@ -2881,11 +2671,11 @@ async def test_get_worker_pool_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" - assert response.service_account_email == 'service_account_email_value' + assert response.service_account_email == "service_account_email_value" assert response.worker_count == 1314 @@ -2894,10 +2684,11 @@ async def test_get_worker_pool_async(transport: str = 'grpc_asyncio'): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_delete_worker_pool(transport: str = 'grpc', request_type=cloudbuild.DeleteWorkerPoolRequest): +def test_delete_worker_pool( + transport: str = "grpc", request_type=cloudbuild.DeleteWorkerPoolRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2906,8 +2697,8 @@ def test_delete_worker_pool(transport: str = 'grpc', request_type=cloudbuild.Del # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_worker_pool), - '__call__') as call: + type(client._transport.delete_worker_pool), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2928,10 +2719,9 @@ def test_delete_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_delete_worker_pool_async(transport: str = 'grpc_asyncio'): +async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2940,8 +2730,8 @@ async def test_delete_worker_pool_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_worker_pool), - '__call__') as call: + type(client._client._transport.delete_worker_pool), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2957,10 +2747,11 @@ async def test_delete_worker_pool_async(transport: str = 'grpc_asyncio'): assert response is None -def test_update_worker_pool(transport: str = 'grpc', request_type=cloudbuild.UpdateWorkerPoolRequest): +def test_update_worker_pool( + transport: str = "grpc", request_type=cloudbuild.UpdateWorkerPoolRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2969,22 +2760,16 @@ def test_update_worker_pool(transport: str = 'grpc', request_type=cloudbuild.Upd # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_worker_pool), - '__call__') as call: + type(client._transport.update_worker_pool), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( - name='name_value', - - project_id='project_id_value', - - service_account_email='service_account_email_value', - + name="name_value", + project_id="project_id_value", + service_account_email="service_account_email_value", worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - ) response = client.update_worker_pool(request) @@ -2998,11 +2783,11 @@ def test_update_worker_pool(transport: str = 'grpc', request_type=cloudbuild.Upd # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" - assert response.service_account_email == 'service_account_email_value' + assert response.service_account_email == "service_account_email_value" assert response.worker_count == 1314 @@ -3016,10 +2801,9 @@ def test_update_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_update_worker_pool_async(transport: str = 'grpc_asyncio'): +async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3028,17 +2812,19 @@ async def test_update_worker_pool_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_worker_pool), - '__call__') as call: + type(client._client._transport.update_worker_pool), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.WorkerPool( - name='name_value', - project_id='project_id_value', - service_account_email='service_account_email_value', - worker_count=1314, - regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], - status=cloudbuild.WorkerPool.Status.CREATING, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.WorkerPool( + name="name_value", + project_id="project_id_value", + service_account_email="service_account_email_value", + worker_count=1314, + regions=[cloudbuild.WorkerPool.Region.US_CENTRAL1], + status=cloudbuild.WorkerPool.Status.CREATING, + ) + ) response = await client.update_worker_pool(request) @@ -3051,11 +2837,11 @@ async def test_update_worker_pool_async(transport: str = 'grpc_asyncio'): # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) - assert response.name == 'name_value' + assert response.name == "name_value" - assert response.project_id == 'project_id_value' + assert response.project_id == "project_id_value" - assert response.service_account_email == 'service_account_email_value' + assert response.service_account_email == "service_account_email_value" assert response.worker_count == 1314 @@ -3064,10 +2850,11 @@ async def test_update_worker_pool_async(transport: str = 'grpc_asyncio'): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_list_worker_pools(transport: str = 'grpc', request_type=cloudbuild.ListWorkerPoolsRequest): +def test_list_worker_pools( + transport: str = "grpc", request_type=cloudbuild.ListWorkerPoolsRequest +): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3076,11 +2863,10 @@ def test_list_worker_pools(transport: str = 'grpc', request_type=cloudbuild.List # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_worker_pools), - '__call__') as call: + type(client._transport.list_worker_pools), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = cloudbuild.ListWorkerPoolsResponse( - ) + call.return_value = cloudbuild.ListWorkerPoolsResponse() response = client.list_worker_pools(request) @@ -3099,10 +2885,9 @@ def test_list_worker_pools_from_dict(): @pytest.mark.asyncio -async def test_list_worker_pools_async(transport: str = 'grpc_asyncio'): +async def test_list_worker_pools_async(transport: str = "grpc_asyncio"): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3111,11 +2896,12 @@ async def test_list_worker_pools_async(transport: str = 'grpc_asyncio'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_worker_pools), - '__call__') as call: + type(client._client._transport.list_worker_pools), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloudbuild.ListWorkerPoolsResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.ListWorkerPoolsResponse() + ) response = await client.list_worker_pools(request) @@ -3136,8 +2922,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport=transport, + credentials=credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -3156,8 +2941,7 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = CloudBuildClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -3187,13 +2971,8 @@ def test_transport_get_channel(): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - ) - assert isinstance( - client._transport, - transports.CloudBuildGrpcTransport, - ) + client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.CloudBuildGrpcTransport,) def test_cloud_build_base_transport_error(): @@ -3201,13 +2980,15 @@ def test_cloud_build_base_transport_error(): with pytest.raises(exceptions.DuplicateCredentialArgs): transport = transports.CloudBuildTransport( credentials=credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_cloud_build_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__') as Transport: + with mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.CloudBuildTransport( credentials=credentials.AnonymousCredentials(), @@ -3216,23 +2997,23 @@ def test_cloud_build_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'create_build', - 'get_build', - 'list_builds', - 'cancel_build', - 'retry_build', - 'create_build_trigger', - 'get_build_trigger', - 'list_build_triggers', - 'delete_build_trigger', - 'update_build_trigger', - 'run_build_trigger', - 'create_worker_pool', - 'get_worker_pool', - 'delete_worker_pool', - 'update_worker_pool', - 'list_worker_pools', - ) + "create_build", + "get_build", + "list_builds", + "cancel_build", + "retry_build", + "create_build_trigger", + "get_build_trigger", + "list_build_triggers", + "delete_build_trigger", + "update_build_trigger", + "run_build_trigger", + "create_worker_pool", + "get_worker_pool", + "delete_worker_pool", + "update_worker_pool", + "list_worker_pools", + ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -3245,27 +3026,30 @@ def test_cloud_build_base_transport(): def test_cloud_build_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(auth, 'load_credentials_from_file') as load_creds, mock.patch('google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages') as Transport: + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.CloudBuildTransport( - credentials_file="credentials.json", - quota_project_id="octopus", + credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_cloud_build_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, 'default') as adc: + with mock.patch.object(auth, "default") as adc: adc.return_value = (credentials.AnonymousCredentials(), None) CloudBuildClient() - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform',), + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @@ -3273,32 +3057,39 @@ def test_cloud_build_auth_adc(): def test_cloud_build_transport_auth_adc(): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, 'default') as adc: + with mock.patch.object(auth, "default") as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - transports.CloudBuildGrpcTransport(host="squid.clam.whelk", quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform',), + transports.CloudBuildGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) + def test_cloud_build_host_no_port(): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com'), + client_options=client_options.ClientOptions( + api_endpoint="cloudbuild.googleapis.com" + ), ) - assert client._transport._host == 'cloudbuild.googleapis.com:443' + assert client._transport._host == "cloudbuild.googleapis.com:443" def test_cloud_build_host_with_port(): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudbuild.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="cloudbuild.googleapis.com:8000" + ), ) - assert client._transport._host == 'cloudbuild.googleapis.com:8000' + assert client._transport._host == "cloudbuild.googleapis.com:8000" def test_cloud_build_grpc_transport_channel(): - channel = grpc.insecure_channel('http://localhost/') + channel = grpc.insecure_channel("http://localhost/") # Check that if channel is provided, mtls endpoint and client_cert_source # won't be used. @@ -3315,7 +3106,7 @@ def test_cloud_build_grpc_transport_channel(): def test_cloud_build_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel('http://localhost/') + channel = aio.insecure_channel("http://localhost/") # Check that if channel is provided, mtls endpoint and client_cert_source # won't be used. 
@@ -3359,9 +3150,7 @@ def test_cloud_build_grpc_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, ) @@ -3396,9 +3185,7 @@ def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_client_cert_source "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, ) @@ -3435,9 +3222,7 @@ def test_cloud_build_grpc_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, ) @@ -3474,9 +3259,7 @@ def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, ) @@ -3485,16 +3268,12 @@ def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_adc( def test_cloud_build_grpc_lro_client(): client = CloudBuildClient( - credentials=credentials.AnonymousCredentials(), - transport='grpc', + credentials=credentials.AnonymousCredentials(), transport="grpc", ) transport = client._transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -3502,16 +3281,12 @@ def test_cloud_build_grpc_lro_client(): def test_cloud_build_grpc_lro_async_client(): client = CloudBuildAsyncClient( - credentials=credentials.AnonymousCredentials(), - transport='grpc_asyncio', + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client._client._transport # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client From 4a8a2d07edc468197c03f034ae19cbe703cfc15c Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 31 Jul 2020 08:56:09 -0700 Subject: [PATCH 4/6] feat(python-library): changes to docs job * feat(python-library): changes to docs job * migrate to Trampoline V2 * add docs-presubmit job * create docfx yaml files and upload them to another bucket * remove redundant envvars * add a failing test first * fix TemplateSyntaxError: Missing end of comment tag * serving_path is not needed any more * use `raw` to make jinja happy Source-Author: Takashi Matsuo Source-Date: Thu Jul 30 12:44:02 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 5dfda5621df45b71b6e88544ebbb53b1a8c90214 Source-Link: https://github.com/googleapis/synthtool/commit/5dfda5621df45b71b6e88544ebbb53b1a8c90214 --- .gitignore | 3 +- .kokoro/docker/docs/Dockerfile | 98 ++++++ .kokoro/docker/docs/fetch_gpg_keys.sh | 45 +++ .kokoro/docs/common.cfg | 17 + .kokoro/docs/docs-presubmit.cfg | 17 + .kokoro/publish-docs.sh | 21 +- .kokoro/trampoline_v2.sh | 487 ++++++++++++++++++++++++++ .trampolinerc | 51 +++ docs/conf.py | 3 + noxfile.py | 33 ++ synth.metadata | 2 +- 11 files changed, 774 insertions(+), 3 deletions(-) create mode 100644 .kokoro/docker/docs/Dockerfile create mode 100755 .kokoro/docker/docs/fetch_gpg_keys.sh create mode 100644 .kokoro/docs/docs-presubmit.cfg create mode 100755 .kokoro/trampoline_v2.sh create mode 100644 .trampolinerc diff --git a/.gitignore b/.gitignore index b87e1ed5..b9daa52f 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,7 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ @@ -57,4 +58,4 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile new file mode 100644 index 00000000..412b0b56 --- /dev/null +++ b/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. 
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends \
+    apt-transport-https \
+    build-essential \
+    ca-certificates \
+    curl \
+    dirmngr \
+    git \
+    gpg-agent \
+    graphviz \
+    libbz2-dev \
+    libdb5.3-dev \
+    libexpat1-dev \
+    libffi-dev \
+    liblzma-dev \
+    libreadline-dev \
+    libsnappy-dev \
+    libssl-dev \
+    libsqlite3-dev \
+    portaudio19-dev \
+    redis-server \
+    software-properties-common \
+    ssh \
+    sudo \
+    tcl \
+    tcl-dev \
+    tk \
+    tk-dev \
+    uuid-dev \
+    wget \
+    zlib1g-dev \
+  && add-apt-repository universe \
+  && apt-get update \
+  && apt-get -y install jq \
+  && apt-get clean autoclean \
+  && apt-get autoremove -y \
+  && rm -rf /var/lib/apt/lists/* \
+  && rm -f /var/cache/apt/archives/*.deb
+
+
+COPY fetch_gpg_keys.sh /tmp
+# Install the desired versions of Python.
+RUN set -ex \
+    && export GNUPGHOME="$(mktemp -d)" \
+    && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
+    && /tmp/fetch_gpg_keys.sh \
+    && for PYTHON_VERSION in 3.7.8 3.8.5; do \
+        wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
+        && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
+        && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
+        && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
+        && mkdir -p /usr/src/python-${PYTHON_VERSION} \
+        && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
+        && rm python-${PYTHON_VERSION}.tar.xz \
+        && cd /usr/src/python-${PYTHON_VERSION} \
+        && ./configure \
+            --enable-shared \
+            # This works only on Python 2.7 and throws a warning on every other
+            # version, but seems otherwise harmless.
+            --enable-unicode=ucs4 \
+            --with-system-ffi \
+            --without-ensurepip \
+        && make -j$(nproc) \
+        && make install \
+        && ldconfig \
+    ; done \
+    && rm -rf "${GNUPGHOME}" \
+    && rm -rf /usr/src/python* \
+    && rm -rf ~/.cache/
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+    && python3.7 /tmp/get-pip.py \
+    && python3.8 /tmp/get-pip.py \
+    && rm /tmp/get-pip.py
+
+CMD ["python3.7"]
diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh
new file mode 100755
index 00000000..d653dd86
--- /dev/null
+++ b/.kokoro/docker/docs/fetch_gpg_keys.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A script to fetch gpg keys with retry.
+# Avoid jinja parsing the file.
+#
+
+function retry {
+    if [[ "${#}" -le 1 ]]; then
+        echo "Usage: ${0} retry_count commands.."
+        exit 1
+    fi
+    local retries=${1}
+    local command="${@:2}"
+    until [[ "${retries}" -le 0 ]]; do
+        $command && return 0
+        if [[ $? -ne 0 ]]; then
+            echo "command failed, retrying"
+            ((retries--))
+        fi
+    done
+    return 1
+}
+
+# 3.6.9, 3.7.5 (Ned Deily)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+    0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
+
+# 3.8.0 (Łukasz Langa)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+    E3FF2839C048B25C084DEBE9B26995E310250568
+
+#
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 0e6863e9..ddca87f3 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -28,6 +28,23 @@ env_vars: {
     value: "docs-staging"
 }
 
+env_vars: {
+    key: "V2_STAGING_BUCKET"
+    value: "docs-staging-v2-staging"
+}
+
+# It will upload the docker image after successful builds.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE_UPLOAD"
+    value: "true"
+}
+
+# It will always build the docker image.
+env_vars: {
+    key: "TRAMPOLINE_DOCKERFILE"
+    value: ".kokoro/docker/docs/Dockerfile"
+}
+
 # Fetch the token needed for reporting release status to GitHub
 before_action {
   fetch_keystore {
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 00000000..11181078
--- /dev/null
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,17 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "STAGING_BUCKET"
+    value: "gcloud-python-test"
+}
+
+env_vars: {
+    key: "V2_STAGING_BUCKET"
+    value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE_UPLOAD"
+    value: "false"
+}
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index 6877cdd5..660af961 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -52,4 +52,23 @@ python3 -m docuploader create-metadata \
 cat docs.metadata
 
 # upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+  --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+  --version=$(python3 setup.py --version) \
+  --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+  --distribution-name=$(python3 setup.py --name) \
+  --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+  --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+  --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 00000000..719bcd5b
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker container with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+#     (true|false): Whether to upload the Docker image after a
+#     successful build.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+#     Defaults to /workspace.
+# There may be some repo-specific envvars in .trampolinerc in
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+    readonly IO_COLOR_RED="$(tput setaf 1)"
+    readonly IO_COLOR_GREEN="$(tput setaf 2)"
+    readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+    readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+    readonly IO_COLOR_RED=""
+    readonly IO_COLOR_GREEN=""
+    readonly IO_COLOR_YELLOW=""
+    readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+    [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+    local color="$1"
+    shift
+    local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+    echo "================================================================"
+    echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+    echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+    log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+    log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+    log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+    log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+    rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+    # TRAMPOLINE_V2 variables.
+    # Tells scripts whether they are running as part of CI or not.
+    "RUNNING_IN_CI"
+    # Indicates which CI system we're in.
+    "TRAMPOLINE_CI"
+    # Indicates the version of the script.
+    "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container to tell it which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+    # Descriptive env var for indicating it's on CI.
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="kokoro"
+    if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+        if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+            log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+            exit 1
+        fi
+        # This service account will be activated later.
+        TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+    else
+        if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+            gcloud auth list
+        fi
+        log_yellow "Configuring Container Registry access"
+        gcloud auth configure-docker --quiet
+    fi
+    pass_down_envvars+=(
+        # KOKORO dynamic variables.
+        "KOKORO_BUILD_NUMBER"
+        "KOKORO_BUILD_ID"
+        "KOKORO_JOB_NAME"
+        "KOKORO_GIT_COMMIT"
+        "KOKORO_GITHUB_COMMIT"
+        "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+        "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+        # For Build Cop Bot
+        "KOKORO_GITHUB_COMMIT_URL"
+        "KOKORO_GITHUB_PULL_REQUEST_URL"
+    )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="travis"
+    pass_down_envvars+=(
+        "TRAVIS_BRANCH"
+        "TRAVIS_BUILD_ID"
+        "TRAVIS_BUILD_NUMBER"
+        "TRAVIS_BUILD_WEB_URL"
+        "TRAVIS_COMMIT"
+        "TRAVIS_COMMIT_MESSAGE"
+        "TRAVIS_COMMIT_RANGE"
+        "TRAVIS_JOB_NAME"
+        "TRAVIS_JOB_NUMBER"
+        "TRAVIS_JOB_WEB_URL"
+        "TRAVIS_PULL_REQUEST"
+        "TRAVIS_PULL_REQUEST_BRANCH"
+        "TRAVIS_PULL_REQUEST_SHA"
+        "TRAVIS_PULL_REQUEST_SLUG"
+        "TRAVIS_REPO_SLUG"
+        "TRAVIS_SECURE_ENV_VARS"
+        "TRAVIS_TAG"
+    )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="github-workflow"
+    pass_down_envvars+=(
+        "GITHUB_WORKFLOW"
+        "GITHUB_RUN_ID"
+        "GITHUB_RUN_NUMBER"
+        "GITHUB_ACTION"
+        "GITHUB_ACTIONS"
+        "GITHUB_ACTOR"
+        "GITHUB_REPOSITORY"
+        "GITHUB_EVENT_NAME"
+        "GITHUB_EVENT_PATH"
+        "GITHUB_SHA"
+        "GITHUB_REF"
+        "GITHUB_HEAD_REF"
+        "GITHUB_BASE_REF"
+    )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="circleci"
+    pass_down_envvars+=(
+        "CIRCLE_BRANCH"
+        "CIRCLE_BUILD_NUM"
+        "CIRCLE_BUILD_URL"
+        "CIRCLE_COMPARE_URL"
+        "CIRCLE_JOB"
+        "CIRCLE_NODE_INDEX"
+        "CIRCLE_NODE_TOTAL"
+        "CIRCLE_PREVIOUS_BUILD_NUM"
+        "CIRCLE_PROJECT_REPONAME"
+        "CIRCLE_PROJECT_USERNAME"
+        "CIRCLE_REPOSITORY_URL"
+        "CIRCLE_SHA1"
+        "CIRCLE_STAGE"
+        "CIRCLE_USERNAME"
+        "CIRCLE_WORKFLOW_ID"
+        "CIRCLE_WORKFLOW_JOB_ID"
+        "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+        "CIRCLE_WORKFLOW_WORKSPACE_ID"
+    )
+fi
+
+# Configure the service account for pulling the docker image.
+function repo_root() {
+    local dir="$1"
+    while [[ ! -d "${dir}/.git" ]]; do
+        dir="$(dirname "$dir")"
+    done
+    echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there; otherwise, traverse from `pwd`
+# to find the `.git` directory.
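+# For example, if this script is at /repo/.kokoro/trampoline_v2.sh, the
+# project root resolves to /repo (an illustrative path, not a real one).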
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. 
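+    # (Locally, the build output streams straight to the terminal; see the
+    # else branch below.)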
+    if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+        if docker build "${docker_build_flags[@]}" "${context_dir}" \
+            > "${tmpdir}/docker_build.log" 2>&1; then
+            if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+                cat "${tmpdir}/docker_build.log"
+            fi
+
+            log_green "Finished building the docker image."
+            update_cache="true"
+        else
+            log_red "Failed to build the Docker image, aborting."
+            log_yellow "Dumping the build logs:"
+            cat "${tmpdir}/docker_build.log"
+            exit 1
+        fi
+    else
+        if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+            log_green "Finished building the docker image."
+            update_cache="true"
+        else
+            log_red "Failed to build the Docker image, aborting."
+            exit 1
+        fi
+    fi
+else
+    if [[ "${has_image}" != "true" ]]; then
+        log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+        exit 1
+    fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+    "--rm"
+
+    # Use the host network.
+    "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+    # isolation, just for packaging our dev tools.
+    "--privileged"
+
+    # Run the docker script with the user id. Because the docker image gets to
+    # write in ${PWD} you typically want this to be your user id.
+    # To allow docker in docker, we need to use the docker gid on the host.
+    "--user" "${user_uid}:${docker_gid}"
+
+    # Pass down the USER.
+    "--env" "USER=${user_name}"
+
+    # Mount the project directory inside the Docker container.
+    "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+    "--workdir" "${TRAMPOLINE_WORKSPACE}"
+    "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+    # Mount the temporary home directory.
+    "--volume" "${tmphome}:/h"
+    "--env" "HOME=/h"
+
+    # Allow docker in docker.
+    "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+    # Mount /tmp so that docker in docker can mount the files
+    # there correctly.
+    "--volume" "/tmp:/tmp"
+    # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+    # TODO(tmatsuo): This part is not portable.
+    "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+    "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+    "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+    "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+    "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+    docker_flags+=("-it")
+fi
+
+# Pass down env vars.
+for e in "${pass_down_envvars[@]}"
+do
+    if [[ -n "${!e:-}" ]]; then
+        docker_flags+=("--env" "${e}=${!e}")
+    fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+    log_yellow "Running the given commands '" "${@:1}" "' in the container."
+    readonly commands=("${@:1}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+    log_yellow "Running the tests in a Docker container."
+    docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
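+# The exit status of the `docker run` above; failures were not fatal here
+# because of the earlier `set +e`, so the status is reported and propagated
+# explicitly below.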
+ +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/.trampolinerc b/.trampolinerc new file mode 100644 index 00000000..995ee291 --- /dev/null +++ b/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/docs/conf.py b/docs/conf.py index 2d0ebebb..faa1e829 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,6 +20,9 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. 
+sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ diff --git a/noxfile.py b/noxfile.py index da184cbd..3ba8d36e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -162,3 +162,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/synth.metadata b/synth.metadata index 8a012673..ecc07b82 100644 --- a/synth.metadata +++ b/synth.metadata @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "ee7506d15daa3873accfff9430eff7e3953f0248" + "sha": "5dfda5621df45b71b6e88544ebbb53b1a8c90214" } } ], From ef8725c273c132097a5b91590fc0c6ec5d172641 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 31 Jul 2020 08:56:27 -0700 Subject: [PATCH 5/6] fix(python-library): add missing changes Source-Author: Takashi Matsuo Source-Date: Thu Jul 30 18:26:35 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 39b527a39f5cd56d4882b3874fc08eed4756cebe Source-Link: https://github.com/googleapis/synthtool/commit/39b527a39f5cd56d4882b3874fc08eed4756cebe --- .kokoro/docs/common.cfg | 4 ++-- .kokoro/publish-docs.sh | 18 ++++-------------- synth.metadata | 2 +- 3 files changed, 7 insertions(+), 17 deletions(-) diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index ddca87f3..a86c6f8b 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-cloudbuild/.kokoro/trampoline.sh" +build_file: "python-cloudbuild/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 660af961..8acb14e8 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 -cd github/python-cloudbuild - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ diff --git a/synth.metadata b/synth.metadata index ecc07b82..3e5ab1e1 100644 --- a/synth.metadata +++ b/synth.metadata @@ -19,7 +19,7 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5dfda5621df45b71b6e88544ebbb53b1a8c90214" + "sha": "39b527a39f5cd56d4882b3874fc08eed4756cebe" } } ], From 9537f9062006689063077901aedcd5a6feb3574c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Wed, 4 Nov 2020 15:57:30 +0000 Subject: [PATCH 6/6] chore: regen --- .github/snippet-bot.yml | 0 .kokoro/build.sh | 8 +- .kokoro/docs/common.cfg | 2 +- .kokoro/populate-secrets.sh | 43 + .kokoro/release/common.cfg | 50 +- .kokoro/samples/python3.6/common.cfg | 6 + .kokoro/samples/python3.7/common.cfg | 6 + .kokoro/samples/python3.8/common.cfg | 6 + .kokoro/test-samples.sh | 8 +- .kokoro/trampoline.sh | 15 +- CODE_OF_CONDUCT.md | 123 +- CONTRIBUTING.rst | 19 - docs/cloudbuild_v1/types.rst | 1 + docs/conf.py | 12 +- .../cloudbuild_v1/proto/cloudbuild.proto | 137 +- .../services/cloud_build/async_client.py | 155 ++- .../services/cloud_build/client.py | 213 +++- .../services/cloud_build/transports/base.py | 68 +- .../services/cloud_build/transports/grpc.py | 72 +- .../cloud_build/transports/grpc_asyncio.py | 65 +- .../cloudbuild_v1/types/cloudbuild.py | 140 +- noxfile.py | 12 +- scripts/decrypt-secrets.sh | 15 +- scripts/fixup_cloudbuild_v1_keywords.py | 11 +- synth.metadata | 10 +- .../gapic/cloudbuild_v1/test_cloud_build.py | 1124 +++++++++++------ 26 files changed, 1603 insertions(+), 718 deletions(-) create mode 100644 .github/snippet-bot.yml create mode 100755 .kokoro/populate-secrets.sh diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml new file mode 100644 index 00000000..e69de29b diff --git a/.kokoro/build.sh b/.kokoro/build.sh index d76d29f6..185e4aaa 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index a86c6f8b..5b751977 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker image after successful builds. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 00000000..f5251425 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index dfcae22e..41909982 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-cloudbuild/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index 335f2379..b73a217c 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. 
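+# (Presumably a dedicated project per Python version so that concurrent
+# sample test runs stay isolated from one another.)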
+env_vars: {
+  key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+  value: "python-docs-samples-tests-py36"
+}
+
 env_vars: {
   key: "TRAMPOLINE_BUILD_FILE"
   value: "github/python-cloudbuild/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index a9fec387..36ba440c 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
   value: "py-3.7"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+  key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+  value: "python-docs-samples-tests-py37"
+}
+
 env_vars: {
   key: "TRAMPOLINE_BUILD_FILE"
   value: "github/python-cloudbuild/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index eb2f3c25..5a0de211 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
   value: "py-3.8"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+  key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+  value: "python-docs-samples-tests-py38"
+}
+
 env_vars: {
   key: "TRAMPOLINE_BUILD_FILE"
   value: "github/python-cloudbuild/.kokoro/test-samples.sh"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index e560d138..d42f5129 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
   git checkout $LATEST_RELEASE
 fi
 
+# Exit early if the samples directory doesn't exist.
+if [ ! -d "./samples" ]; then
+  echo "No tests run. './samples' not found"
+  exit 0
+fi
+
 # Disable buffering, so that the logs stream through.
 export PYTHONUNBUFFERED=1
@@ -101,4 +107,4 @@ cd "$ROOT"
 # Workaround for Kokoro permissions issue: delete secrets
 rm testing/{test-env.sh,client-secrets.json,service-account.json}
-exit "$RTN"
\ No newline at end of file
+exit "$RTN"
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251f..f39236e9 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
 
 set -eo pipefail
 
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+  chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+  ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+  echo "cleanup";
+}
+trap cleanup EXIT
 
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b3d1f602..039f4368 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,44 +1,95 @@
-# Contributor Code of Conduct
+# Code of Conduct
 
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
 
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7566ac72..1f4f33e6 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? 
***************************************** diff --git a/docs/cloudbuild_v1/types.rst b/docs/cloudbuild_v1/types.rst index efe8ce72..3fcd1832 100644 --- a/docs/cloudbuild_v1/types.rst +++ b/docs/cloudbuild_v1/types.rst @@ -3,3 +3,4 @@ Types for Google Devtools Cloudbuild v1 API .. automodule:: google.cloud.devtools.cloudbuild_v1.types :members: + :show-inheritance: diff --git a/docs/conf.py b/docs/conf.py index faa1e829..4a604d68 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -21,6 +21,7 @@ sys.path.insert(0, os.path.abspath("..")) # For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 sys.path.insert(0, os.path.abspath(".")) __version__ = "" @@ -28,7 +29,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -38,6 +39,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -93,7 +95,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -342,6 +349,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto b/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto index 4e3a0188..de8a1de3 100644 --- a/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto +++ b/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto @@ -19,9 +19,11 @@ package google.devtools.cloudbuild.v1; import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/devtools/cloudbuild/v1;cloudbuild"; @@ -29,6 +31,14 @@ option java_multiple_files = true; option java_package = "com.google.cloudbuild.v1"; option objc_class_prefix = "GCB"; option ruby_package = "Google::Cloud::Build::V1"; +option (google.api.resource_definition) = { + type: "compute.googleapis.com/Network" + pattern: "projects/{project}/global/networks/{network}" +}; +option (google.api.resource_definition) = { + type: "iam.googleapis.com/ServiceAccount" + pattern: "projects/{project}/serviceAccounts/{service_account}" +}; // Creates and manages builds on Google Cloud Platform. 
// @@ -52,6 +62,10 @@ service CloudBuild { option (google.api.http) = { post: "/v1/projects/{project_id}/builds" body: "build" + additional_bindings { + post: "/v1/{parent=projects/*/locations/*}/builds" + body: "*" + } }; option (google.api.method_signature) = "project_id,build"; option (google.longrunning.operation_info) = { @@ -67,6 +81,7 @@ service CloudBuild { rpc GetBuild(GetBuildRequest) returns (Build) { option (google.api.http) = { get: "/v1/projects/{project_id}/builds/{id}" + additional_bindings { get: "/v1/{name=projects/*/locations/*/builds/*}" } }; option (google.api.method_signature) = "project_id,id"; } @@ -78,6 +93,7 @@ service CloudBuild { rpc ListBuilds(ListBuildsRequest) returns (ListBuildsResponse) { option (google.api.http) = { get: "/v1/projects/{project_id}/builds" + additional_bindings { get: "/v1/{parent=projects/*/locations/*}/builds" } }; option (google.api.method_signature) = "project_id,filter"; } @@ -87,6 +103,10 @@ service CloudBuild { option (google.api.http) = { post: "/v1/projects/{project_id}/builds/{id}:cancel" body: "*" + additional_bindings { + post: "/v1/{name=projects/*/locations/*/builds/*}:cancel" + body: "*" + } }; option (google.api.method_signature) = "project_id,id"; } @@ -122,6 +142,10 @@ service CloudBuild { option (google.api.http) = { post: "/v1/projects/{project_id}/builds/{id}:retry" body: "*" + additional_bindings { + post: "/v1/{name=projects/*/locations/*/builds/*}:retry" + body: "*" + } }; option (google.api.method_signature) = "project_id,id"; option (google.longrunning.operation_info) = { @@ -228,6 +252,12 @@ service CloudBuild { // Specifies a build to retry. message RetryBuildRequest { + // The name of the `Build` to retry. + // Format: `projects/{project}/locations/{location}/builds/{build}` + string name = 3 [(google.api.resource_reference) = { + type: "cloudbuild.googleapis.com/Build" + }]; + // Required. ID of the project. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; @@ -416,7 +446,7 @@ message BuildStep { // Output only. Stores timing information for pulling this build step's // builder image only. - TimeSpan pull_timing = 13; + TimeSpan pull_timing = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Time limit for executing this build step. If not defined, the step has no // time limit and will be allowed to continue to run until either it completes @@ -426,7 +456,7 @@ message BuildStep { // Output only. Status of the build step. At this time, build step status is // only updated on build completion; step status is not updated in real-time // as the build progresses. - Build.Status status = 12; + Build.Status status = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Volume describes a Docker container volume which is mounted into build steps @@ -502,6 +532,12 @@ message ArtifactResult { // resolved from the specified branch or tag. // - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. message Build { + option (google.api.resource) = { + type: "cloudbuild.googleapis.com/Build" + pattern: "projects/{project}/builds/{build}" + pattern: "projects/{project}/locations/{location}/builds/{build}" + }; + // Possible status of a build or build step. enum Status { // Status of the build is unknown. @@ -532,6 +568,11 @@ message Build { EXPIRED = 9; } + // Output only. The 'Build' name with format: + // `projects/{project}/locations/{location}/builds/{build}`, where {build} + // is a unique identifier generated by the service. 
+  string name = 45 [(google.api.field_behavior) = OUTPUT_ONLY];
+
   // Output only. Unique identifier of the build.
   string id = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
@@ -539,10 +580,10 @@ message Build {
   string project_id = 16 [(google.api.field_behavior) = OUTPUT_ONLY];
 
   // Output only. Status of the build.
-  Status status = 2;
+  Status status = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
 
   // Output only. Customer-readable message about the current status.
-  string status_detail = 24;
+  string status_detail = 24 [(google.api.field_behavior) = OUTPUT_ONLY];
 
   // The location of the source files to build.
   Source source = 3;
@@ -572,6 +613,8 @@ message Build {
   // granularity. If this amount of time elapses, work on the build will cease
   // and the build status will be `TIMEOUT`.
   //
+  // `timeout` starts ticking from `startTime`.
+  //
   // Default time is ten minutes.
   google.protobuf.Duration timeout = 12;
@@ -605,11 +648,12 @@ message Build {
   string logs_bucket = 19;
 
   // Output only. A permanent fixed identifier for source.
-  SourceProvenance source_provenance = 21;
+  SourceProvenance source_provenance = 21
+      [(google.api.field_behavior) = OUTPUT_ONLY];
 
   // Output only. The ID of the `BuildTrigger` that triggered this build, if it
   // was triggered automatically.
-  string build_trigger_id = 22;
+  string build_trigger_id = 22 [(google.api.field_behavior) = OUTPUT_ONLY];
 
   // Special options for this build.
   BuildOptions options = 23;
@@ -636,6 +680,15 @@ message Build {
   // If the build does not specify source or images,
   // these keys will not be included.
   map<string, TimeSpan> timing = 33 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // IAM service account whose credentials will be used at build runtime.
+  // Must be of the format `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`.
+  // ACCOUNT can be email address or uniqueId of the service account.
+  //
+  // This field is in alpha and is not publicly available.
+  string service_account = 42 [(google.api.resource_reference) = {
+    type: "iam.googleapis.com/ServiceAccount"
+  }];
 }
 
 // Artifacts produced by a build that should be uploaded upon
@@ -656,7 +709,7 @@ message Artifacts {
   repeated string paths = 2;
 
   // Output only. Stores timing information for pushing all artifact objects.
-  TimeSpan timing = 3;
+  TimeSpan timing = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
 }
 
 // A list of images to be pushed upon the successful completion of all build
@@ -769,6 +822,12 @@ message Secret {
 // Request to create a new build.
 message CreateBuildRequest {
+  // The parent resource where this build will be created.
+  // Format: `projects/{project}/locations/{location}`
+  string parent = 4 [(google.api.resource_reference) = {
+    child_type: "cloudbuild.googleapis.com/Build"
+  }];
+
   // Required. ID of the project.
   string project_id = 1 [(google.api.field_behavior) = REQUIRED];
@@ -778,6 +837,12 @@ message CreateBuildRequest {
 // Request to get a build.
 message GetBuildRequest {
+  // The name of the `Build` to retrieve.
+  // Format: `projects/{project}/locations/{location}/builds/{build}`
+  string name = 4 [(google.api.resource_reference) = {
+    type: "cloudbuild.googleapis.com/Build"
+  }];
+
   // Required. ID of the project.
   string project_id = 1 [(google.api.field_behavior) = REQUIRED];
@@ -787,6 +852,12 @@ message GetBuildRequest {
 // Request to list builds.
 message ListBuildsRequest {
+  // The parent of the collection of `Builds`.
+  // Format: `projects/{project}/locations/{location}`
+  string parent = 9 [(google.api.resource_reference) = {
+    child_type: "cloudbuild.googleapis.com/Build"
+  }];
+
   // Required. ID of the project.
   string project_id = 1 [(google.api.field_behavior) = REQUIRED];
@@ -811,6 +882,12 @@ message ListBuildsResponse {
 // Request to cancel an ongoing build.
 message CancelBuildRequest {
+  // The name of the `Build` to retrieve.
+  // Format: `projects/{project}/locations/{location}/builds/{build}`
+  string name = 4 [(google.api.resource_reference) = {
+    type: "cloudbuild.googleapis.com/Build"
+  }];
+
   // Required. ID of the project.
   string project_id = 1 [(google.api.field_behavior) = REQUIRED];
@@ -821,6 +898,11 @@ message CancelBuildRequest {
 // Configuration for an automated build in response to source repository
 // changes.
 message BuildTrigger {
+  option (google.api.resource) = {
+    type: "cloudbuild.googleapis.com/BuildTrigger"
+    pattern: "projects/{project}/triggers/{trigger}"
+  };
+
   // Output only. Unique identifier of the trigger.
   string id = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
@@ -867,12 +949,11 @@ message BuildTrigger {
   google.protobuf.Timestamp create_time = 5
       [(google.api.field_behavior) = OUTPUT_ONLY];
 
-  // If true, the trigger will never result in a build.
+  // If true, the trigger will never automatically execute a build.
   bool disabled = 9;
 
   // Substitutions for Build resource. The keys must match the following
-  // regular expression: `^_[A-Z0-9_]+$`.The keys cannot conflict with the
-  // keys in bindings.
+  // regular expression: `^_[A-Z0-9_]+$`.
   map<string, string> substitutions = 11;
 
   // ignored_files and included_files are file glob matches using
@@ -948,8 +1029,8 @@ message PullRequestFilter {
     string branch = 2;
   }
 
-  // Whether to block builds on a "/gcbrun" comment from a repository admin or
-  // collaborator.
+  // Configure whether builds require a `/gcbrun` comment from a repository
+  // owner or collaborator before running.
   CommentControl comment_control = 5;
 
   // If true, branches that do NOT match the git_ref will trigger a build.
@@ -1050,7 +1131,9 @@ message BuildOptions {
     VERIFIED = 1;
   }
 
-  // Supported VM sizes.
+  // Supported Compute Engine machine types.
+  // For more information, see [Machine
+  // types](https://cloud.google.com/compute/docs/machine-types).
   enum MachineType {
     // Standard machine type.
     UNSPECIFIED = 0;
@@ -1091,11 +1174,23 @@ message BuildOptions {
     // rely on the default logging behavior as it may change in the future.
     LOGGING_UNSPECIFIED = 0;
 
-    // Stackdriver logging and Cloud Storage logging are enabled.
+    // Cloud Logging and Cloud Storage logging are enabled.
     LEGACY = 1;
 
     // Only Cloud Storage logging is enabled.
     GCS_ONLY = 2;
+
+    // This option is the same as CLOUD_LOGGING_ONLY.
+    STACKDRIVER_ONLY = 3 [deprecated = true];
+
+    // Only Cloud Logging is enabled. Note that logs for both the Cloud Console
+    // UI and Cloud SDK are based on Cloud Storage logs, so neither will provide
+    // logs if this option is chosen.
+    CLOUD_LOGGING_ONLY = 5;
+
+    // Turn off all logging. No build logs will be captured.
+    // Next ID: 6
+    NONE = 4;
   }
 
   // Requested hash for SourceProvenance.
@@ -1117,8 +1212,18 @@ message BuildOptions {
   // Option to specify behavior when there is an error in the substitution
   // checks.
+  //
+  // NOTE: this is always set to ALLOW_LOOSE for triggered builds and cannot
+  // be overridden in the build configuration file.
   SubstitutionOption substitution_option = 4;
 
+  // Option to specify whether or not to apply bash-style string
+  // operations to the substitutions.
+ // + // NOTE: this is always enabled for triggered builds and cannot be + // overridden in the build configuration file. + bool dynamic_substitutions = 17; + // Option to define build log streaming behavior to Google Cloud // Storage. LogStreamingOption log_streaming_option = 5; @@ -1129,8 +1234,8 @@ message BuildOptions { // This field is experimental. string worker_pool = 7; - // Option to specify the logging mode, which determines where the logs are - // stored. + // Option to specify the logging mode, which determines if and where build + // logs are stored. LoggingMode logging = 11; // A list of global environment variable definitions that will exist for all diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index 96421e96..469f1edf 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -28,14 +28,14 @@ from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers from google.cloud.devtools.cloudbuild_v1.types import cloudbuild from google.protobuf import duration_pb2 as duration # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from .transports.base import CloudBuildTransport +from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport from .client import CloudBuildClient @@ -56,9 +56,50 @@ class CloudBuildAsyncClient: DEFAULT_ENDPOINT = CloudBuildClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = CloudBuildClient.DEFAULT_MTLS_ENDPOINT + build_path = staticmethod(CloudBuildClient.build_path) + parse_build_path = staticmethod(CloudBuildClient.parse_build_path) + build_trigger_path = staticmethod(CloudBuildClient.build_trigger_path) + parse_build_trigger_path = staticmethod(CloudBuildClient.parse_build_trigger_path) + service_account_path = staticmethod(CloudBuildClient.service_account_path) + parse_service_account_path = staticmethod( + CloudBuildClient.parse_service_account_path + ) + + common_billing_account_path = staticmethod( + CloudBuildClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudBuildClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(CloudBuildClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudBuildClient.parse_common_folder_path) + + common_organization_path = staticmethod(CloudBuildClient.common_organization_path) + parse_common_organization_path = staticmethod( + CloudBuildClient.parse_common_organization_path + ) + + common_project_path = staticmethod(CloudBuildClient.common_project_path) + parse_common_project_path = staticmethod(CloudBuildClient.parse_common_project_path) + + common_location_path = staticmethod(CloudBuildClient.common_location_path) + parse_common_location_path = staticmethod( + CloudBuildClient.parse_common_location_path + ) + from_service_account_file = CloudBuildClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> 
CloudBuildTransport: + """Return the transport used by the client instance. + + Returns: + CloudBuildTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(CloudBuildClient).get_transport_class, type(CloudBuildClient) ) @@ -69,6 +110,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, CloudBuildTransport] = "grpc_asyncio", client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the cloud build client. @@ -84,16 +126,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -101,7 +146,10 @@ def __init__( """ self._client = CloudBuildClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, ) async def create_build( @@ -173,7 +221,8 @@ async def create_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, build]): + has_flattened_params = any([project_id, build]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -194,7 +243,7 @@ async def create_build( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_build, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -276,7 +325,8 @@ async def get_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -301,11 +351,11 @@ async def get_build( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -360,7 +410,8 @@ async def list_builds( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, filter]): + has_flattened_params = any([project_id, filter]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -385,11 +436,11 @@ async def list_builds( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -465,7 +516,8 @@ async def cancel_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -486,7 +538,7 @@ async def cancel_build( rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_build, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -590,7 +642,8 @@ async def retry_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -611,7 +664,7 @@ async def retry_build( rpc = gapic_v1.method_async.wrap_method( self._client._transport.retry_build, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -674,7 +727,8 @@ async def create_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger]): + has_flattened_params = any([project_id, trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
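The `has_flattened_params` refactor repeated above enforces one calling convention across every RPC: a caller supplies either a fully-formed request object or the flattened keyword arguments, never both. A minimal usage sketch of that rule, not part of the patch (the project and build IDs are placeholders):

    from google.cloud.devtools import cloudbuild_v1

    client = cloudbuild_v1.CloudBuildClient()

    # Flattened arguments...
    build = client.get_build(project_id="my-project", id="some-build-id")

    # ...or an explicit request object, but never both at once:
    request = cloudbuild_v1.GetBuildRequest(project_id="my-project", id="some-build-id")
    build = client.get_build(request=request)

    # Passing `request` together with flattened arguments raises the
    # ValueError shown in the check above.
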
@@ -695,7 +749,7 @@ async def create_build_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_build_trigger, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -751,7 +805,8 @@ async def get_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id]): + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -776,11 +831,11 @@ async def get_build_trigger( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -830,7 +885,8 @@ async def list_build_triggers( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id]): + has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -853,11 +909,11 @@ async def list_build_triggers( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -910,7 +966,8 @@ async def delete_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id]): + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -935,11 +992,11 @@ async def delete_build_trigger( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -999,7 +1056,8 @@ async def update_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id, trigger]): + has_flattened_params = any([project_id, trigger_id, trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
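Each idempotent RPC in these hunks is wrapped with the same exponential-backoff policy. As a standalone sketch (the values are copied from the diff; the decorated function is hypothetical), the policy retries only `DeadlineExceeded` and `ServiceUnavailable`, starting at 0.1 s and growing roughly 1.3x per attempt up to a 60 s cap:

    from google.api_core import exceptions
    from google.api_core import retry as retries

    transient_retry = retries.Retry(
        initial=0.1,     # first backoff delay, in seconds
        maximum=60.0,    # backoff delays are capped at 60 seconds
        multiplier=1.3,  # each successive delay grows ~1.3x
        predicate=retries.if_exception_type(
            exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
        ),
    )

    @transient_retry
    def flaky_call():
        ...  # transient errors matching the predicate are retried
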
@@ -1022,7 +1080,7 @@ async def update_build_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_build_trigger, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1104,7 +1162,8 @@ async def run_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id, source]): + has_flattened_params = any([project_id, trigger_id, source]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1127,7 +1186,7 @@ async def run_build_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.run_build_trigger, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1193,7 +1252,7 @@ async def create_worker_pool( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_worker_pool, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1254,11 +1313,11 @@ async def get_worker_pool( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1298,7 +1357,7 @@ async def delete_worker_pool( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_worker_pool, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1353,7 +1412,7 @@ async def update_worker_pool( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_worker_pool, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1401,11 +1460,11 @@ async def list_worker_pools( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. 
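Build RPCs such as `create_build` and `retry_build` return long-running operations; on the async surface wired up above, both the RPC itself and the final result are awaited. A hedged sketch of typical consumption (IDs are placeholders, not from the patch):

    import asyncio
    from google.cloud.devtools import cloudbuild_v1

    async def main():
        client = cloudbuild_v1.CloudBuildAsyncClient()
        # The RPC returns an AsyncOperation wrapping the long-running build.
        operation = await client.retry_build(project_id="my-project", id="earlier-build-id")
        build = await operation.result()  # wait until the re-run build finishes
        print(build.status)

    asyncio.run(main())
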
@@ -1416,13 +1475,13 @@ async def list_worker_pools( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-devtools-cloudbuild", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("CloudBuildAsyncClient",) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index e11623af..a301a515 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -16,28 +16,30 @@ # from collections import OrderedDict +from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers from google.cloud.devtools.cloudbuild_v1.types import cloudbuild from google.protobuf import duration_pb2 as duration # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from .transports.base import CloudBuildTransport +from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudBuildGrpcTransport from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport @@ -138,12 +140,121 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> CloudBuildTransport: + """Return the transport used by the client instance. + + Returns: + CloudBuildTransport: The transport used by the client instance. 
+ """ + return self._transport + + @staticmethod + def build_path(project: str, build: str,) -> str: + """Return a fully-qualified build string.""" + return "projects/{project}/builds/{build}".format(project=project, build=build,) + + @staticmethod + def parse_build_path(path: str) -> Dict[str, str]: + """Parse a build path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/builds/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def build_trigger_path(project: str, trigger: str,) -> str: + """Return a fully-qualified build_trigger string.""" + return "projects/{project}/triggers/{trigger}".format( + project=project, trigger=trigger, + ) + + @staticmethod + def parse_build_trigger_path(path: str) -> Dict[str, str]: + """Parse a build_trigger path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/triggers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_account_path(project: str, service_account: str,) -> str: + """Return a fully-qualified service_account string.""" + return "projects/{project}/serviceAccounts/{service_account}".format( + project=project, service_account=service_account, + ) + + @staticmethod + def parse_service_account_path(path: str) -> Dict[str, str]: + """Parse a service_account path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = 
re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, CloudBuildTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, CloudBuildTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the cloud build client. @@ -156,48 +267,74 @@ def __init__( transport (Union[str, ~.CloudBuildTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. @@ -221,11 +358,11 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def create_build( @@ -1511,13 +1648,13 @@ def list_worker_pools( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-devtools-cloudbuild", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("CloudBuildClient",) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py index 32d9f153..d796b5b0 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py @@ -19,7 +19,7 @@ import typing import pkg_resources -from google import auth +from google import auth # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -32,13 +32,13 @@ try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-devtools-cloudbuild", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = 
gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class CloudBuildTransport(abc.ABC): @@ -54,6 +54,7 @@ def __init__( credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. @@ -71,6 +72,11 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -98,13 +104,13 @@ def __init__( self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages() + self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self): + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.create_build: gapic_v1.method.wrap_method( - self.create_build, default_timeout=600.0, client_info=_client_info, + self.create_build, default_timeout=600.0, client_info=client_info, ), self.get_build: gapic_v1.method.wrap_method( self.get_build, @@ -113,11 +119,11 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), self.list_builds: gapic_v1.method.wrap_method( self.list_builds, @@ -126,22 +132,22 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), self.cancel_build: gapic_v1.method.wrap_method( - self.cancel_build, default_timeout=600.0, client_info=_client_info, + self.cancel_build, default_timeout=600.0, client_info=client_info, ), self.retry_build: gapic_v1.method.wrap_method( - self.retry_build, default_timeout=600.0, client_info=_client_info, + self.retry_build, default_timeout=600.0, client_info=client_info, ), self.create_build_trigger: gapic_v1.method.wrap_method( self.create_build_trigger, default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), self.get_build_trigger: gapic_v1.method.wrap_method( self.get_build_trigger, @@ -150,11 +156,11 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), self.list_build_triggers: gapic_v1.method.wrap_method( self.list_build_triggers, @@ -163,11 +169,11 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + 
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), self.delete_build_trigger: gapic_v1.method.wrap_method( self.delete_build_trigger, @@ -176,24 +182,22 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), self.update_build_trigger: gapic_v1.method.wrap_method( self.update_build_trigger, default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), self.run_build_trigger: gapic_v1.method.wrap_method( - self.run_build_trigger, default_timeout=600.0, client_info=_client_info, + self.run_build_trigger, default_timeout=600.0, client_info=client_info, ), self.create_worker_pool: gapic_v1.method.wrap_method( - self.create_worker_pool, - default_timeout=600.0, - client_info=_client_info, + self.create_worker_pool, default_timeout=600.0, client_info=client_info, ), self.get_worker_pool: gapic_v1.method.wrap_method( self.get_worker_pool, @@ -202,21 +206,17 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), self.delete_worker_pool: gapic_v1.method.wrap_method( - self.delete_worker_pool, - default_timeout=600.0, - client_info=_client_info, + self.delete_worker_pool, default_timeout=600.0, client_info=client_info, ), self.update_worker_pool: gapic_v1.method.wrap_method( - self.update_worker_pool, - default_timeout=600.0, - client_info=_client_info, + self.update_worker_pool, default_timeout=600.0, client_info=client_info, ), self.list_worker_pools: gapic_v1.method.wrap_method( self.list_worker_pools, @@ -225,11 +225,11 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=client_info, ), } diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py index c8affe84..88497cfe 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py @@ -15,22 +15,23 @@ # limitations under the License. 
 #
 
+import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple
 
 from google.api_core import grpc_helpers  # type: ignore
 from google.api_core import operations_v1  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
 from google import auth  # type: ignore
 from google.auth import credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
-
 import grpc  # type: ignore
 
 from google.cloud.devtools.cloudbuild_v1.types import cloudbuild
 from google.longrunning import operations_pb2 as operations  # type: ignore
 from google.protobuf import empty_pb2 as empty  # type: ignore
 
-from .base import CloudBuildTransport
+from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO
 
 
 class CloudBuildGrpcTransport(CloudBuildTransport):
@@ -65,7 +66,9 @@ def __init__(
         channel: grpc.Channel = None,
         api_mtls_endpoint: str = None,
         client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
-        quota_project_id: Optional[str] = None
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
         """Instantiate the transport.
 
@@ -84,16 +87,23 @@ def __init__(
                 ignored if ``channel`` is provided.
             channel (Optional[grpc.Channel]): A ``Channel`` instance through
                 which to make calls.
-            api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
-                provided, it overrides the ``host`` argument and tries to create
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
                 a mutual TLS channel with client SSL credentials from
                 ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
-                callback to provide client SSL certificate bytes and private key
-                bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
-                is None.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the gRPC channel. It is ignored if ``channel`` is provided.
             quota_project_id (Optional[str]): An optional project to use for billing
                 and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
 
         Raises:
           google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -101,6 +111,8 @@ def __init__(
           google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
             and ``credentials_file`` are passed.
         """
+        self._ssl_channel_credentials = ssl_channel_credentials
+
         if channel:
             # Sanity check: Ensure that channel and credentials are not both
             # provided.
@@ -108,7 +120,13 @@ def __init__(
 
             # If a channel was explicitly provided, set it.
self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -139,6 +157,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -149,6 +185,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) @classmethod @@ -159,7 +196,7 @@ def create_channel( credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, - **kwargs + **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -193,24 +230,13 @@ def create_channel( credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. return self._grpc_channel @property diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py index 817e30a5..a2716bb8 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -15,10 +15,13 @@ # limitations under the License. 
 #
 
+import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
 
+from google.api_core import gapic_v1  # type: ignore
 from google.api_core import grpc_helpers_async  # type: ignore
 from google.api_core import operations_v1  # type: ignore
+from google import auth  # type: ignore
 from google.auth import credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 
@@ -29,7 +32,7 @@
 from google.longrunning import operations_pb2 as operations  # type: ignore
 from google.protobuf import empty_pb2 as empty  # type: ignore
 
-from .base import CloudBuildTransport
+from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO
 from .grpc import CloudBuildGrpcTransport
 
 
@@ -107,7 +110,9 @@ def __init__(
         channel: aio.Channel = None,
         api_mtls_endpoint: str = None,
         client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
         quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
         """Instantiate the transport.
 
@@ -127,16 +132,23 @@ def __init__(
                 are passed to :func:`google.auth.default`.
             channel (Optional[aio.Channel]): A ``Channel`` instance through
                 which to make calls.
-            api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
-                provided, it overrides the ``host`` argument and tries to create
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
                 a mutual TLS channel with client SSL credentials from
                 ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
-                callback to provide client SSL certificate bytes and private key
-                bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
-                is None.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the gRPC channel. It is ignored if ``channel`` is provided.
             quota_project_id (Optional[str]): An optional project to use for billing
                 and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
 
         Raises:
             google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -144,6 +156,8 @@ def __init__(
             google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                 and ``credentials_file`` are passed.
         """
+        self._ssl_channel_credentials = ssl_channel_credentials
+
         if channel:
             # Sanity check: Ensure that channel and credentials are not both
             # provided.
@@ -151,13 +165,24 @@ def __init__(
 
             # If a channel was explicitly provided, set it.
             self._grpc_channel = channel
+            self._ssl_channel_credentials = None
         elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
             host = (
                 api_mtls_endpoint
                 if ":" in api_mtls_endpoint
                 else api_mtls_endpoint + ":443"
            )
 
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
             # Create SSL credentials with client_cert_source or application
             # default SSL credentials.
             if client_cert_source:
@@ -177,6 +202,24 @@ def __init__(
                 scopes=scopes or self.AUTH_SCOPES,
                 quota_project_id=quota_project_id,
             )
+            self._ssl_channel_credentials = ssl_credentials
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
 
         # Run the base constructor.
         super().__init__(
@@ -185,6 +228,7 @@ def __init__(
             credentials_file=credentials_file,
             scopes=scopes or self.AUTH_SCOPES,
             quota_project_id=quota_project_id,
+            client_info=client_info,
         )
 
         self._stubs = {}
@@ -196,13 +240,6 @@ def grpc_channel(self) -> aio.Channel:
         This property caches on the instance; repeated calls return
         the same channel.
         """
-        # Sanity check: Only create a new channel if we do not already
-        # have one.
-        if not hasattr(self, "_grpc_channel"):
-            self._grpc_channel = self.create_channel(
-                self._host, credentials=self._credentials,
-            )
-
         # Return the channel from cache.
         return self._grpc_channel
 
diff --git a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py
index b7f5dbbd..ae9dca1a 100644
--- a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py
+++ b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py
@@ -76,12 +76,17 @@ class RetryBuildRequest(proto.Message):
     r"""Specifies a build to retry.
 
     Attributes:
+        name (str):
+            The name of the ``Build`` to retry. Format:
+            ``projects/{project}/locations/{location}/builds/{build}``
         project_id (str):
             Required. ID of the project.
         id (str):
             Required. Build ID of the original build.
     """
 
+    name = proto.Field(proto.STRING, number=3)
+
     project_id = proto.Field(proto.STRING, number=1)
 
     id = proto.Field(proto.STRING, number=2)
 
@@ -116,7 +121,7 @@ class StorageSource(proto.Message):
             Google Cloud Storage bucket containing the source (see
             `Bucket Name
             Requirements <https://cloud.google.com/storage/docs/bucket-naming#requirements>`__).
-        object (str):
+        object_ (str):
             Google Cloud Storage object containing the
             source. This object must be a gzipped archive
             file (``.tar.gz``) containing source to build.
@@ -129,7 +134,7 @@ class StorageSource(proto.Message):
 
     bucket = proto.Field(proto.STRING, number=1)
 
-    object = proto.Field(proto.STRING, number=2)
+    object_ = proto.Field(proto.STRING, number=2)
 
     generation = proto.Field(proto.INT64, number=3)
 
@@ -157,7 +162,7 @@ class RepoSource(proto.Message):
             https://github.com/google/re2/wiki/Syntax
         commit_sha (str):
             Explicit commit SHA to build.
-        dir (str):
+        dir_ (str):
             Directory, relative to the source root, in
             which to run the build.
 
@@ -182,7 +187,7 @@ class RepoSource(proto.Message):
 
     commit_sha = proto.Field(proto.STRING, number=5, oneof="revision")
 
-    dir = proto.Field(proto.STRING, number=7)
+    dir_ = proto.Field(proto.STRING, number=7)
 
     invert_regex = proto.Field(proto.BOOL, number=8)
 
@@ -202,11 +207,11 @@ class Source(proto.Message):
     """
 
     storage_source = proto.Field(
-        proto.MESSAGE, number=2, oneof="source", message=StorageSource,
+        proto.MESSAGE, number=2, oneof="source", message="StorageSource",
     )
 
     repo_source = proto.Field(
-        proto.MESSAGE, number=3, oneof="source", message=RepoSource,
+        proto.MESSAGE, number=3, oneof="source", message="RepoSource",
     )
 
 
@@ -269,7 +274,7 @@ class BuildStep(proto.Message):
             entrypoint.
             If the image does not define an entrypoint, the first element
             in args is used as the entrypoint, and the remainder will be
             used as arguments.
-        dir (str):
+        dir_ (str):
             Working directory to use when running this
             step's container.
             If this value is a relative path, it is
             relative to the
@@ -333,7 +338,7 @@ class BuildStep(proto.Message):
 
     args = proto.RepeatedField(proto.STRING, number=3)
 
-    dir = proto.Field(proto.STRING, number=4)
+    dir_ = proto.Field(proto.STRING, number=4)
 
     id = proto.Field(proto.STRING, number=5)
 
@@ -407,7 +412,7 @@ class Results(proto.Message):
             Time to push all non-container artifacts.
     """
 
-    images = proto.RepeatedField(proto.MESSAGE, number=2, message=BuiltImage,)
+    images = proto.RepeatedField(proto.MESSAGE, number=2, message="BuiltImage",)
 
     build_step_images = proto.RepeatedField(proto.STRING, number=3)
 
@@ -458,6 +463,11 @@ class Build(proto.Message):
        -  $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA.
 
     Attributes:
+        name (str):
+            Output only. The 'Build' name with format:
+            ``projects/{project}/locations/{location}/builds/{build}``,
+            where {build} is a unique identifier generated by the
+            service.
         id (str):
             Output only. Unique identifier of the build.
         project_id (str):
@@ -492,6 +502,8 @@ class Build(proto.Message):
             the build will cease and the build status will be
             ``TIMEOUT``.
 
+            ``timeout`` starts ticking from ``startTime``.
+
             Default time is ten minutes.
         images (Sequence[str]):
             A list of images to be pushed upon the successful completion
@@ -550,6 +562,13 @@ class Build(proto.Message):
 
             If the build does not specify source or images,
             these keys will not be included.
+        service_account (str):
+            IAM service account whose credentials will be used at build
+            runtime. Must be of the format
+            ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}``. ACCOUNT
+            can be the email address or uniqueId of the service account.
+
+            This field is in alpha and is not publicly available.
     """
 
     class Status(proto.Enum):
@@ -564,6 +583,8 @@ class Status(proto.Enum):
         CANCELLED = 7
         EXPIRED = 9
 
+    name = proto.Field(proto.STRING, number=45)
+
     id = proto.Field(proto.STRING, number=1)
 
     project_id = proto.Field(proto.STRING, number=16)
 
@@ -572,11 +593,11 @@ class Status(proto.Enum):
 
     status_detail = proto.Field(proto.STRING, number=24)
 
-    source = proto.Field(proto.MESSAGE, number=3, message=Source,)
+    source = proto.Field(proto.MESSAGE, number=3, message="Source",)
 
-    steps = proto.RepeatedField(proto.MESSAGE, number=11, message=BuildStep,)
+    steps = proto.RepeatedField(proto.MESSAGE, number=11, message="BuildStep",)
 
-    results = proto.Field(proto.MESSAGE, number=10, message=Results,)
+    results = proto.Field(proto.MESSAGE, number=10, message="Results",)
 
     create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,)
 
@@ -612,6 +633,8 @@ class Status(proto.Enum):
 
     timing = proto.MapField(proto.STRING, proto.MESSAGE, number=33, message="TimeSpan",)
 
+    service_account = proto.Field(proto.STRING, number=42)
+
 
 class Artifacts(proto.Message):
     r"""Artifacts produced by a build that should be uploaded upon
@@ -700,7 +723,7 @@ class BuildOperationMetadata(proto.Message):
             The build that the operation is tracking.
""" - build = proto.Field(proto.MESSAGE, number=1, message=Build,) + build = proto.Field(proto.MESSAGE, number=1, message="Build",) class SourceProvenance(proto.Message): @@ -730,10 +753,10 @@ class SourceProvenance(proto.Message): """ resolved_storage_source = proto.Field( - proto.MESSAGE, number=3, message=StorageSource, + proto.MESSAGE, number=3, message="StorageSource", ) - resolved_repo_source = proto.Field(proto.MESSAGE, number=6, message=RepoSource,) + resolved_repo_source = proto.Field(proto.MESSAGE, number=6, message="RepoSource",) file_hashes = proto.MapField( proto.STRING, proto.MESSAGE, number=4, message="FileHashes", @@ -757,7 +780,7 @@ class Hash(proto.Message): r"""Container message for hash values. Attributes: - type (~.cloudbuild.Hash.HashType): + type_ (~.cloudbuild.Hash.HashType): The type of hash that was performed. value (bytes): The hash value. @@ -769,7 +792,7 @@ class HashType(proto.Enum): SHA256 = 1 MD5 = 2 - type = proto.Field(proto.ENUM, number=1, enum=HashType,) + type_ = proto.Field(proto.ENUM, number=1, enum=HashType,) value = proto.Field(proto.BYTES, number=2) @@ -802,27 +825,37 @@ class CreateBuildRequest(proto.Message): r"""Request to create a new build. Attributes: + parent (str): + The parent resource where this build will be created. + Format: ``projects/{project}/locations/{location}`` project_id (str): Required. ID of the project. build (~.cloudbuild.Build): Required. Build resource to create. """ + parent = proto.Field(proto.STRING, number=4) + project_id = proto.Field(proto.STRING, number=1) - build = proto.Field(proto.MESSAGE, number=2, message=Build,) + build = proto.Field(proto.MESSAGE, number=2, message="Build",) class GetBuildRequest(proto.Message): r"""Request to get a build. Attributes: + name (str): + The name of the ``Build`` to retrieve. Format: + ``projects/{project}/locations/{location}/builds/{build}`` project_id (str): Required. ID of the project. id (str): Required. ID of the build. """ + name = proto.Field(proto.STRING, number=4) + project_id = proto.Field(proto.STRING, number=1) id = proto.Field(proto.STRING, number=2) @@ -832,6 +865,9 @@ class ListBuildsRequest(proto.Message): r"""Request to list builds. Attributes: + parent (str): + The parent of the collection of ``Builds``. Format: + ``projects/{project}/locations/location`` project_id (str): Required. ID of the project. page_size (int): @@ -843,6 +879,8 @@ class ListBuildsRequest(proto.Message): The raw filter text to constrain the results. """ + parent = proto.Field(proto.STRING, number=9) + project_id = proto.Field(proto.STRING, number=1) page_size = proto.Field(proto.INT32, number=2) @@ -866,7 +904,7 @@ class ListBuildsResponse(proto.Message): def raw_page(self): return self - builds = proto.RepeatedField(proto.MESSAGE, number=1, message=Build,) + builds = proto.RepeatedField(proto.MESSAGE, number=1, message="Build",) next_page_token = proto.Field(proto.STRING, number=2) @@ -875,12 +913,17 @@ class CancelBuildRequest(proto.Message): r"""Request to cancel an ongoing build. Attributes: + name (str): + The name of the ``Build`` to retrieve. Format: + ``projects/{project}/locations/{location}/builds/{build}`` project_id (str): Required. ID of the project. id (str): Required. ID of the build. """ + name = proto.Field(proto.STRING, number=4) + project_id = proto.Field(proto.STRING, number=1) id = proto.Field(proto.STRING, number=2) @@ -929,12 +972,11 @@ class BuildTrigger(proto.Message): Output only. Time when the trigger was created. 
         disabled (bool):
-            If true, the trigger will never result in a
-            build.
+            If true, the trigger will never automatically
+            execute a build.
         substitutions (Sequence[~.cloudbuild.BuildTrigger.SubstitutionsEntry]):
             Substitutions for Build resource. The keys must match the
-            following regular expression: ``^_[A-Z0-9_]+$``.The keys
-            cannot conflict with the keys in bindings.
+            following regular expression: ``^_[A-Z0-9_]+$``.
         ignored_files (Sequence[str]):
             ignored_files and included_files are file glob matches using
             https://golang.org/pkg/path/filepath/#Match extended with
@@ -967,11 +1009,13 @@ class BuildTrigger(proto.Message):
 
     tags = proto.RepeatedField(proto.STRING, number=19)
 
-    trigger_template = proto.Field(proto.MESSAGE, number=7, message=RepoSource,)
+    trigger_template = proto.Field(proto.MESSAGE, number=7, message="RepoSource",)
 
     github = proto.Field(proto.MESSAGE, number=13, message="GitHubEventsConfig",)
 
-    build = proto.Field(proto.MESSAGE, number=4, oneof="build_template", message=Build,)
+    build = proto.Field(
+        proto.MESSAGE, number=4, oneof="build_template", message="Build",
+    )
 
     filename = proto.Field(proto.STRING, number=8, oneof="build_template")
 
@@ -1036,8 +1080,8 @@ class PullRequestFilter(proto.Message):
             is the syntax accepted by RE2 and described at
             https://github.com/google/re2/wiki/Syntax
         comment_control (~.cloudbuild.PullRequestFilter.CommentControl):
-            Whether to block builds on a "/gcbrun"
-            comment from a repository admin or collaborator.
+            Configure whether builds should run in response to a
+            ``/gcbrun`` comment from a repository owner or collaborator.
         invert_regex (bool):
             If true, branches that do NOT match the git_ref will trigger
             a build.
@@ -1095,7 +1139,7 @@ class CreateBuildTriggerRequest(proto.Message):
 
     project_id = proto.Field(proto.STRING, number=1)
 
-    trigger = proto.Field(proto.MESSAGE, number=2, message=BuildTrigger,)
+    trigger = proto.Field(proto.MESSAGE, number=2, message="BuildTrigger",)
 
 
 class GetBuildTriggerRequest(proto.Message):
@@ -1151,7 +1195,7 @@ class ListBuildTriggersResponse(proto.Message):
     def raw_page(self):
         return self
 
-    triggers = proto.RepeatedField(proto.MESSAGE, number=1, message=BuildTrigger,)
+    triggers = proto.RepeatedField(proto.MESSAGE, number=1, message="BuildTrigger",)
 
     next_page_token = proto.Field(proto.STRING, number=2)
 
@@ -1189,7 +1233,7 @@ class UpdateBuildTriggerRequest(proto.Message):
 
     trigger_id = proto.Field(proto.STRING, number=2)
 
-    trigger = proto.Field(proto.MESSAGE, number=3, message=BuildTrigger,)
+    trigger = proto.Field(proto.MESSAGE, number=3, message="BuildTrigger",)
 
 
 class BuildOptions(proto.Message):
@@ -1213,8 +1257,18 @@ class BuildOptions(proto.Message):
             builds that request more than the maximum are rejected with
             an error.
         substitution_option (~.cloudbuild.BuildOptions.SubstitutionOption):
-            Option to specify behavior when there is an
-            error in the substitution checks.
+            Option to specify behavior when there is an error in the
+            substitution checks.
+
+            NOTE: this is always set to ALLOW_LOOSE for triggered builds
+            and cannot be overridden in the build configuration file.
+        dynamic_substitutions (bool):
+            Option to specify whether or not to apply
+            bash-style string operations to the
+            substitutions.
+            NOTE: this is always enabled for triggered
+            builds and cannot be overridden in the build
+            configuration file.
         log_streaming_option (~.cloudbuild.BuildOptions.LogStreamingOption):
             Option to define build log streaming behavior
             to Google Cloud Storage.
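The substitution-key rule documented above (keys must match ``^_[A-Z0-9_]+$``) can be sanity-checked directly. A small standalone sketch, independent of the library itself:

    import re

    SUBSTITUTION_KEY = re.compile(r"^_[A-Z0-9_]+$")

    assert SUBSTITUTION_KEY.match("_MY_VAR")      # user-defined keys start with "_"
    assert not SUBSTITUTION_KEY.match("MY_VAR")   # leading underscore is required
    assert not SUBSTITUTION_KEY.match("_my_var")  # lowercase characters are rejected
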
@@ -1225,7 +1279,7 @@ class BuildOptions(proto.Message):
             This field is experimental.
         logging (~.cloudbuild.BuildOptions.LoggingMode):
             Option to specify the logging mode, which
-            determines where the logs are stored.
+            determines if and where build logs are stored.
         env (Sequence[str]):
             A list of global environment variable
             definitions that will exist for all build steps
@@ -1263,7 +1317,10 @@ class VerifyOption(proto.Enum):
         VERIFIED = 1
 
     class MachineType(proto.Enum):
-        r"""Supported VM sizes."""
+        r"""Supported Compute Engine machine types. For more information, see
+        `Machine
+        types <https://cloud.google.com/compute/docs/machine-types>`__.
+        """
         UNSPECIFIED = 0
         N1_HIGHCPU_8 = 1
         N1_HIGHCPU_32 = 2
@@ -1288,9 +1345,12 @@ class LoggingMode(proto.Enum):
         LOGGING_UNSPECIFIED = 0
         LEGACY = 1
         GCS_ONLY = 2
+        STACKDRIVER_ONLY = 3
+        CLOUD_LOGGING_ONLY = 5
+        NONE = 4
 
     source_provenance_hash = proto.RepeatedField(
-        proto.ENUM, number=1, enum=Hash.HashType,
+        proto.ENUM, number=1, enum="Hash.HashType",
    )
 
     requested_verify_option = proto.Field(proto.ENUM, number=2, enum=VerifyOption,)
@@ -1301,6 +1361,8 @@ class LoggingMode(proto.Enum):
 
     substitution_option = proto.Field(proto.ENUM, number=4, enum=SubstitutionOption,)
 
+    dynamic_substitutions = proto.Field(proto.BOOL, number=17)
+
     log_streaming_option = proto.Field(proto.ENUM, number=5, enum=LogStreamingOption,)
 
     worker_pool = proto.Field(proto.STRING, number=7)
@@ -1311,7 +1373,7 @@ class LoggingMode(proto.Enum):
 
     secret_env = proto.RepeatedField(proto.STRING, number=13)
 
-    volumes = proto.RepeatedField(proto.MESSAGE, number=14, message=Volume,)
+    volumes = proto.RepeatedField(proto.MESSAGE, number=14, message="Volume",)
 
 
 class WorkerPool(proto.Message):
@@ -1477,7 +1539,7 @@ class CreateWorkerPoolRequest(proto.Message):
 
     parent = proto.Field(proto.STRING, number=1)
 
-    worker_pool = proto.Field(proto.MESSAGE, number=2, message=WorkerPool,)
+    worker_pool = proto.Field(proto.MESSAGE, number=2, message="WorkerPool",)
 
 
 class GetWorkerPoolRequest(proto.Message):
@@ -1520,7 +1582,7 @@ class UpdateWorkerPoolRequest(proto.Message):
 
     name = proto.Field(proto.STRING, number=2)
 
-    worker_pool = proto.Field(proto.MESSAGE, number=3, message=WorkerPool,)
+    worker_pool = proto.Field(proto.MESSAGE, number=3, message="WorkerPool",)
 
 
 class ListWorkerPoolsRequest(proto.Message):
@@ -1542,7 +1604,7 @@ class ListWorkerPoolsResponse(proto.Message):
             ``WorkerPools`` for the project.
     """
 
-    worker_pools = proto.RepeatedField(proto.MESSAGE, number=1, message=WorkerPool,)
+    worker_pools = proto.RepeatedField(proto.MESSAGE, number=1, message="WorkerPool",)
 
 
 __all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/noxfile.py b/noxfile.py
index 3ba8d36e..2c77492f 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -72,7 +72,9 @@ def default(session):
     # Install all test dependencies, then install this package in-place.
 
     session.install("asyncmock", "pytest-asyncio")
-    session.install("mock", "pytest", "pytest-cov")
+    session.install(
+        "mock", "pytest", "pytest-cov",
+    )
     session.install("-e", ".")
 
     # Run py.test against the unit tests.
@@ -102,6 +104,10 @@ def system(session):
     """Run the system test suite."""
     system_test_path = os.path.join("tests", "system.py")
     system_test_folder_path = os.path.join("tests", "system")
+
+    # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+    if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+        session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
 
     # Sanity check: Only run tests if the environment variable is set.
     if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
         session.skip("Credentials must be set via environment variable")
 
@@ -169,7 +175,9 @@ def docfx(session):
     """Build the docfx yaml files for this library."""
 
     session.install("-e", ".")
-    session.install("sphinx<3.0.0", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+    # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+    # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+    session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
 
     shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
     session.run(
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index ff599eb2..21f6d2a2 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
 # Work from the project root.
 cd $ROOT
 
+# Prevent overwriting existing files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are expected to prepare these files themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
 # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
 PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
 
 gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+    --project="${PROJECT_ID}" \
     > testing/test-env.sh
 gcloud secrets versions access latest \
     --secret="python-docs-samples-service-account" \
+    --project="${PROJECT_ID}" \
     > testing/service-account.json
 gcloud secrets versions access latest \
     --secret="python-docs-samples-client-secrets" \
-    > testing/client-secrets.json
\ No newline at end of file
+    --project="${PROJECT_ID}" \
+    > testing/client-secrets.json
diff --git a/scripts/fixup_cloudbuild_v1_keywords.py b/scripts/fixup_cloudbuild_v1_keywords.py
index 8d7e6d83..04ca294c 100644
--- a/scripts/fixup_cloudbuild_v1_keywords.py
+++ b/scripts/fixup_cloudbuild_v1_keywords.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
 # -*- coding: utf-8 -*-
 
 # Copyright 2020 Google LLC
@@ -40,19 +41,19 @@ def partition(
 class cloudbuildCallTransformer(cst.CSTTransformer):
     CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
     METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
-        'cancel_build': ('project_id', 'id', ),
-        'create_build': ('project_id', 'build', ),
+        'cancel_build': ('project_id', 'id', 'name', ),
+        'create_build': ('project_id', 'build', 'parent', ),
         'create_build_trigger': ('project_id', 'trigger', ),
         'create_worker_pool': ('parent', 'worker_pool', ),
         'delete_build_trigger': ('project_id', 'trigger_id', ),
         'delete_worker_pool': ('name', ),
-        'get_build': ('project_id', 'id', ),
+        'get_build': ('project_id', 'id', 'name', ),
         'get_build_trigger': ('project_id', 'trigger_id', ),
         'get_worker_pool': ('name', ),
-        'list_builds': ('project_id', 'page_size', 'page_token', 'filter', ),
+        'list_builds': ('project_id', 'parent', 'page_size', 'page_token', 'filter', ),
         'list_build_triggers': ('project_id', 'page_size', 'page_token', ),
         'list_worker_pools': ('parent', ),
-        'retry_build': ('project_id', 'id', ),
+        'retry_build': ('project_id', 'id', 'name', ),
         'run_build_trigger': ('project_id', 'trigger_id', 'source', ),
         'update_build_trigger': ('project_id', 'trigger_id', 'trigger', ),
         'update_worker_pool': ('name', 'worker_pool', ),
diff --git a/synth.metadata b/synth.metadata
index 3e5ab1e1..62e8880c 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,23 +3,23 @@
     {
       "git": {
         "name": ".",
-        "remote": "https://github.com/googleapis/python-cloudbuild.git",
-        "sha": "e3be0483b5b90b9b03d700f542b9154eb0e7c2dd"
+        "remote": "git@github.com:googleapis/python-cloudbuild.git",
+        "sha": "ef8725c273c132097a5b91590fc0c6ec5d172641"
       }
     },
     {
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "fbf9396664b766a08d92da9d4f31be019a847c39",
-        "internalRef": "324209019"
+        "sha": "705962b5a317e083fdbb2311d4baa72df5816686",
+        "internalRef": "340463146"
       }
     },
     {
       "git": {
         "name": "synthtool",
         "remote": "https://github.com/googleapis/synthtool.git",
-        "sha": "39b527a39f5cd56d4882b3874fc08eed4756cebe"
+        "sha": "ba9918cd22874245b55734f57470c719b577e591"
       }
     }
   ],
diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py
index 44666a8e..f2583b92 100644
--- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py
+++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py
@@ -31,7 +31,7 @@
 from google.api_core import gapic_v1
 from google.api_core import grpc_helpers
 from google.api_core import grpc_helpers_async
-from google.api_core import operation_async
+from google.api_core import operation_async  # type: ignore
 from google.api_core import operations_v1
 from google.auth import credentials
 from google.auth.exceptions import MutualTLSChannelError
@@ -97,12 +97,12 @@ def test_cloud_build_client_from_service_account_file(client_class):
     ) as factory:
         factory.return_value = creds
         client = client_class.from_service_account_file("dummy/file/path.json")
-        assert client._transport._credentials == creds
+        assert client.transport._credentials == creds
 
         client = client_class.from_service_account_json("dummy/file/path.json")
-        assert client._transport._credentials == creds
+        assert client.transport._credentials == creds
 
-    assert client._transport._host == "cloudbuild.googleapis.com:443"
+    assert client.transport._host == "cloudbuild.googleapis.com:443"
 
 
 def test_cloud_build_client_get_transport_class():
@@ -156,14 +156,14 @@ def
test_cloud_build_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -172,14 +172,14 @@ def test_cloud_build_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -188,90 +188,173 @@ def test_cloud_build_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
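# A usage sketch, not generated code: the environment variables asserted on
# above are also the end-user knobs for endpoint selection, e.g.
#
#     import os
#     os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
#     os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"
#     client = CloudBuildClient()  # plain endpoint, no client certificate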
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "true"), + ( + CloudBuildAsyncClient, + transports.CloudBuildGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "false"), + ( + CloudBuildAsyncClient, + transports.CloudBuildGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) +) +@mock.patch.object( + CloudBuildAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudBuildAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_build_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds ): patched.return_value = None - client = client_class() + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id="octopus", - ) + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -298,9 +381,9 @@ def test_cloud_build_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -328,9 +411,9 @@ def test_cloud_build_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -345,9 +428,9 @@ def test_cloud_build_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -363,7 +446,7 @@ def test_create_build( request = request_type() # Mock the actual call within the gRPC stub, 
and fake the request. - with mock.patch.object(type(client._transport.create_build), "__call__") as call: + with mock.patch.object(type(client.transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -384,19 +467,19 @@ def test_create_build_from_dict(): @pytest.mark.asyncio -async def test_create_build_async(transport: str = "grpc_asyncio"): +async def test_create_build_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.CreateBuildRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -408,24 +491,29 @@ async def test_create_build_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_create_build_async_from_dict(): + await test_create_build_async(request_type=dict) + + def test_create_build_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_build), "__call__") as call: + with mock.patch.object(type(client.transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_build( - project_id="project_id_value", build=cloudbuild.Build(id="id_value"), + project_id="project_id_value", build=cloudbuild.Build(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -435,7 +523,7 @@ def test_create_build_flattened(): assert args[0].project_id == "project_id_value" - assert args[0].build == cloudbuild.Build(id="id_value") + assert args[0].build == cloudbuild.Build(name="name_value") def test_create_build_flattened_error(): @@ -447,7 +535,7 @@ def test_create_build_flattened_error(): client.create_build( cloudbuild.CreateBuildRequest(), project_id="project_id_value", - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) @@ -456,9 +544,7 @@ async def test_create_build_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. 
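# A sketch of non-test usage (values hypothetical): outside of these mocks,
# create_build returns a long-running operation wrapper that callers usually
# block on:
#
#     operation = client.create_build(project_id="my-project", build=build)
#     finished_build = operation.result(timeout=600)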
call.return_value = operations_pb2.Operation(name="operations/op") @@ -468,7 +554,7 @@ async def test_create_build_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_build( - project_id="project_id_value", build=cloudbuild.Build(id="id_value"), + project_id="project_id_value", build=cloudbuild.Build(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -478,7 +564,7 @@ async def test_create_build_flattened_async(): assert args[0].project_id == "project_id_value" - assert args[0].build == cloudbuild.Build(id="id_value") + assert args[0].build == cloudbuild.Build(name="name_value") @pytest.mark.asyncio @@ -491,7 +577,7 @@ async def test_create_build_flattened_error_async(): await client.create_build( cloudbuild.CreateBuildRequest(), project_id="project_id_value", - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) @@ -505,9 +591,10 @@ def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_build), "__call__") as call: + with mock.patch.object(type(client.transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build( + name="name_value", id="id_value", project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, @@ -517,6 +604,7 @@ def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequ build_trigger_id="build_trigger_id_value", log_url="log_url_value", tags=["tags_value"], + service_account="service_account_value", ) response = client.get_build(request) @@ -528,8 +616,11 @@ def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequ assert args[0] == cloudbuild.GetBuildRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" assert response.project_id == "project_id_value" @@ -548,28 +639,31 @@ def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequ assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + def test_get_build_from_dict(): test_get_build(request_type=dict) @pytest.mark.asyncio -async def test_get_build_async(transport: str = "grpc_asyncio"): +async def test_get_build_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.GetBuildRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloudbuild.Build( + name="name_value", id="id_value", project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, @@ -579,6 +673,7 @@ async def test_get_build_async(transport: str = "grpc_asyncio"): build_trigger_id="build_trigger_id_value", log_url="log_url_value", tags=["tags_value"], + service_account="service_account_value", ) ) @@ -588,11 +683,13 @@ async def test_get_build_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" assert response.project_id == "project_id_value" @@ -611,12 +708,19 @@ async def test_get_build_async(transport: str = "grpc_asyncio"): assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_get_build_async_from_dict(): + await test_get_build_async(request_type=dict) + def test_get_build_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_build), "__call__") as call: + with mock.patch.object(type(client.transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -652,9 +756,7 @@ async def test_get_build_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -697,7 +799,7 @@ def test_list_builds( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse( next_page_token="next_page_token_value", @@ -712,6 +814,7 @@ def test_list_builds( assert args[0] == cloudbuild.ListBuildsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsPager) assert response.next_page_token == "next_page_token_value" @@ -722,19 +825,19 @@ def test_list_builds_from_dict(): @pytest.mark.asyncio -async def test_list_builds_async(transport: str = "grpc_asyncio"): +async def test_list_builds_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.ListBuildsRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListBuildsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
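# Reading aid, not generated code: this patch replaces the private
# `client._transport` / `client._client._transport` attributes with the
# public `client.transport` property throughout, so every stub is now
# patched the same way for both sync and async clients:
#
#     with mock.patch.object(type(client.transport.get_build), "__call__") as call:
#         call.return_value = cloudbuild.Build(name="name_value")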
- with mock.patch.object( - type(client._client._transport.list_builds), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloudbuild.ListBuildsResponse(next_page_token="next_page_token_value",) @@ -746,7 +849,7 @@ async def test_list_builds_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListBuildsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBuildsAsyncPager) @@ -754,11 +857,16 @@ async def test_list_builds_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_builds_async_from_dict(): + await test_list_builds_async(request_type=dict) + + def test_list_builds_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse() @@ -796,9 +904,7 @@ async def test_list_builds_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_builds), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse() @@ -839,7 +945,7 @@ def test_list_builds_pager(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildsResponse( @@ -870,7 +976,7 @@ def test_list_builds_pages(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildsResponse( @@ -887,8 +993,8 @@ def test_list_builds_pages(): RuntimeError, ) pages = list(client.list_builds(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -897,9 +1003,7 @@ async def test_list_builds_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_builds), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
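# A hedged sketch of real pager usage (project id hypothetical); the pagers
# under test hide page tokens entirely:
#
#     for build in client.list_builds(project_id="my-project"):
#         ...  # the sync pager yields Build messages across pages
#
#     async for build in await async_client.list_builds(project_id="my-project"):
#         ...  # the async pager, exercised by the tests below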
call.side_effect = ( @@ -932,9 +1036,7 @@ async def test_list_builds_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_builds), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -952,10 +1054,10 @@ async def test_list_builds_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_builds(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_builds(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_cancel_build( @@ -970,9 +1072,10 @@ def test_cancel_build( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build( + name="name_value", id="id_value", project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, @@ -982,6 +1085,7 @@ def test_cancel_build( build_trigger_id="build_trigger_id_value", log_url="log_url_value", tags=["tags_value"], + service_account="service_account_value", ) response = client.cancel_build(request) @@ -993,8 +1097,11 @@ def test_cancel_build( assert args[0] == cloudbuild.CancelBuildRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" assert response.project_id == "project_id_value" @@ -1013,28 +1120,31 @@ def test_cancel_build( assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + def test_cancel_build_from_dict(): test_cancel_build(request_type=dict) @pytest.mark.asyncio -async def test_cancel_build_async(transport: str = "grpc_asyncio"): +async def test_cancel_build_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.CancelBuildRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CancelBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloudbuild.Build( + name="name_value", id="id_value", project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, @@ -1044,6 +1154,7 @@ async def test_cancel_build_async(transport: str = "grpc_asyncio"): build_trigger_id="build_trigger_id_value", log_url="log_url_value", tags=["tags_value"], + service_account="service_account_value", ) ) @@ -1053,11 +1164,13 @@ async def test_cancel_build_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CancelBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" assert response.project_id == "project_id_value" @@ -1076,12 +1189,19 @@ async def test_cancel_build_async(transport: str = "grpc_asyncio"): assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_cancel_build_async_from_dict(): + await test_cancel_build_async(request_type=dict) + def test_cancel_build_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -1119,9 +1239,7 @@ async def test_cancel_build_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -1168,7 +1286,7 @@ def test_retry_build( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.retry_build), "__call__") as call: + with mock.patch.object(type(client.transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1189,19 +1307,19 @@ def test_retry_build_from_dict(): @pytest.mark.asyncio -async def test_retry_build_async(transport: str = "grpc_asyncio"): +async def test_retry_build_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.RetryBuildRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.RetryBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.retry_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1213,17 +1331,22 @@ async def test_retry_build_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.RetryBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_retry_build_async_from_dict(): + await test_retry_build_async(request_type=dict) + + def test_retry_build_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.retry_build), "__call__") as call: + with mock.patch.object(type(client.transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1261,9 +1384,7 @@ async def test_retry_build_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.retry_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1313,7 +1434,7 @@ def test_create_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_build_trigger), "__call__" + type(client.transport.create_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( @@ -1324,7 +1445,7 @@ def test_create_build_trigger( disabled=True, ignored_files=["ignored_files_value"], included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) response = client.create_build_trigger(request) @@ -1336,6 +1457,7 @@ def test_create_build_trigger( assert args[0] == cloudbuild.CreateBuildTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) assert response.id == "id_value" @@ -1358,18 +1480,20 @@ def test_create_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_create_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.CreateBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build_trigger), "__call__" + type(client.transport.create_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. 
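# A hedged sketch of a realistic trigger payload (all values hypothetical),
# mirroring the build_template oneof these tests exercise:
#
#     trigger = cloudbuild.BuildTrigger(
#         name="deploy-on-push",
#         trigger_template=cloudbuild.RepoSource(
#             repo_name="my-repo", branch_name="main",
#         ),
#         filename="cloudbuild.yaml",  # or: build=cloudbuild.Build(...)
#     )
#     client.create_build_trigger(project_id="my-project", trigger=trigger)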
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1390,7 +1514,7 @@ async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) @@ -1410,12 +1534,17 @@ async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): assert response.included_files == ["included_files_value"] +@pytest.mark.asyncio +async def test_create_build_trigger_async_from_dict(): + await test_create_build_trigger_async(request_type=dict) + + def test_create_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_build_trigger), "__call__" + type(client.transport.create_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -1456,7 +1585,7 @@ async def test_create_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build_trigger), "__call__" + type(client.transport.create_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -1508,7 +1637,7 @@ def test_get_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_build_trigger), "__call__" + type(client.transport.get_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( @@ -1519,7 +1648,7 @@ def test_get_build_trigger( disabled=True, ignored_files=["ignored_files_value"], included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) response = client.get_build_trigger(request) @@ -1531,6 +1660,7 @@ def test_get_build_trigger( assert args[0] == cloudbuild.GetBuildTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) assert response.id == "id_value" @@ -1553,18 +1683,20 @@ def test_get_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_get_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.GetBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build_trigger), "__call__" + type(client.transport.get_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1585,7 +1717,7 @@ async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) @@ -1605,12 +1737,17 @@ async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): assert response.included_files == ["included_files_value"] +@pytest.mark.asyncio +async def test_get_build_trigger_async_from_dict(): + await test_get_build_trigger_async(request_type=dict) + + def test_get_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_build_trigger), "__call__" + type(client.transport.get_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -1650,7 +1787,7 @@ async def test_get_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build_trigger), "__call__" + type(client.transport.get_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -1701,7 +1838,7 @@ def test_list_build_triggers( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse( @@ -1717,6 +1854,7 @@ def test_list_build_triggers( assert args[0] == cloudbuild.ListBuildTriggersRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildTriggersPager) assert response.next_page_token == "next_page_token_value" @@ -1727,18 +1865,20 @@ def test_list_build_triggers_from_dict(): @pytest.mark.asyncio -async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): +async def test_list_build_triggers_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.ListBuildTriggersRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListBuildTriggersRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1753,7 +1893,7 @@ async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListBuildTriggersRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBuildTriggersAsyncPager) @@ -1761,12 +1901,17 @@ async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_build_triggers_async_from_dict(): + await test_list_build_triggers_async(request_type=dict) + + def test_list_build_triggers_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse() @@ -1800,7 +1945,7 @@ async def test_list_build_triggers_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse() @@ -1837,7 +1982,7 @@ def test_list_build_triggers_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1874,7 +2019,7 @@ def test_list_build_triggers_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1896,8 +2041,8 @@ def test_list_build_triggers_pages(): RuntimeError, ) pages = list(client.list_build_triggers(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -1906,7 +2051,7 @@ async def test_list_build_triggers_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), + type(client.transport.list_build_triggers), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1945,7 +2090,7 @@ async def test_list_build_triggers_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.list_build_triggers), + type(client.transport.list_build_triggers), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1969,10 +2114,10 @@ async def test_list_build_triggers_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_build_triggers(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_build_triggers(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_delete_build_trigger( @@ -1988,7 +2133,7 @@ def test_delete_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_build_trigger), "__call__" + type(client.transport.delete_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2010,18 +2155,20 @@ def test_delete_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_delete_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.DeleteBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.DeleteBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_build_trigger), "__call__" + type(client.transport.delete_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2032,18 +2179,23 @@ async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_build_trigger_async_from_dict(): + await test_delete_build_trigger_async(request_type=dict) + + def test_delete_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_build_trigger), "__call__" + type(client.transport.delete_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2083,7 +2235,7 @@ async def test_delete_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_build_trigger), "__call__" + type(client.transport.delete_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2132,7 +2284,7 @@ def test_update_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.update_build_trigger), "__call__" + type(client.transport.update_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( @@ -2143,7 +2295,7 @@ def test_update_build_trigger( disabled=True, ignored_files=["ignored_files_value"], included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) response = client.update_build_trigger(request) @@ -2155,6 +2307,7 @@ def test_update_build_trigger( assert args[0] == cloudbuild.UpdateBuildTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) assert response.id == "id_value" @@ -2177,18 +2330,20 @@ def test_update_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_update_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.UpdateBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.UpdateBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_build_trigger), "__call__" + type(client.transport.update_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2209,7 +2364,7 @@ async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) @@ -2229,12 +2384,17 @@ async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): assert response.included_files == ["included_files_value"] +@pytest.mark.asyncio +async def test_update_build_trigger_async_from_dict(): + await test_update_build_trigger_async(request_type=dict) + + def test_update_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_build_trigger), "__call__" + type(client.transport.update_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -2279,7 +2439,7 @@ async def test_update_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_build_trigger), "__call__" + type(client.transport.update_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -2335,7 +2495,7 @@ def test_run_build_trigger( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.run_build_trigger), "__call__" + type(client.transport.run_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -2357,18 +2517,20 @@ def test_run_build_trigger_from_dict(): @pytest.mark.asyncio -async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_run_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.RunBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.RunBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.run_build_trigger), "__call__" + type(client.transport.run_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2381,18 +2543,23 @@ async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.RunBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_run_build_trigger_async_from_dict(): + await test_run_build_trigger_async(request_type=dict) + + def test_run_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.run_build_trigger), "__call__" + type(client.transport.run_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2437,7 +2604,7 @@ async def test_run_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.run_build_trigger), "__call__" + type(client.transport.run_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2493,7 +2660,7 @@ def test_create_worker_pool( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_worker_pool), "__call__" + type(client.transport.create_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( @@ -2514,6 +2681,7 @@ def test_create_worker_pool( assert args[0] == cloudbuild.CreateWorkerPoolRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.WorkerPool) assert response.name == "name_value" @@ -2534,18 +2702,20 @@ def test_create_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_create_worker_pool_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.CreateWorkerPoolRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_worker_pool), "__call__" + type(client.transport.create_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2565,7 +2735,7 @@ async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateWorkerPoolRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) @@ -2583,6 +2753,11 @@ async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +@pytest.mark.asyncio +async def test_create_worker_pool_async_from_dict(): + await test_create_worker_pool_async(request_type=dict) + + def test_get_worker_pool( transport: str = "grpc", request_type=cloudbuild.GetWorkerPoolRequest ): @@ -2595,7 +2770,7 @@ def test_get_worker_pool( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_worker_pool), "__call__") as call: + with mock.patch.object(type(client.transport.get_worker_pool), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( name="name_value", @@ -2615,6 +2790,7 @@ def test_get_worker_pool( assert args[0] == cloudbuild.GetWorkerPoolRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.WorkerPool) assert response.name == "name_value" @@ -2635,19 +2811,19 @@ def test_get_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_get_worker_pool_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.GetWorkerPoolRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_worker_pool), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_worker_pool), "__call__") as call: # Designate an appropriate return value for the call. 
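[Editor's note] On the async side the stub cannot simply return the proto, because the client awaits the call; grpc_helpers_async.FakeUnaryUnaryCall wraps a canned response in an awaitable that behaves like a real unary-unary gRPC call. A sketch of the async variant; the sketch_* name and the asyncio.run driver are illustrative (the suite itself uses pytest.mark.asyncio):

import asyncio
from unittest import mock

from google.api_core import grpc_helpers_async
from google.auth import credentials
from google.cloud.devtools import cloudbuild_v1
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild


async def sketch_create_worker_pool_async():
    client = cloudbuild_v1.CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.create_worker_pool), "__call__") as call:
        # FakeUnaryUnaryCall is awaitable, like the real stub's return value.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloudbuild.WorkerPool(name="name_value")
        )
        response = await client.create_worker_pool(request={})
        assert response.name == "name_value"


asyncio.run(sketch_create_worker_pool_async())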
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloudbuild.WorkerPool( @@ -2666,7 +2842,7 @@ async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetWorkerPoolRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) @@ -2684,6 +2860,11 @@ async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +@pytest.mark.asyncio +async def test_get_worker_pool_async_from_dict(): + await test_get_worker_pool_async(request_type=dict) + + def test_delete_worker_pool( transport: str = "grpc", request_type=cloudbuild.DeleteWorkerPoolRequest ): @@ -2697,7 +2878,7 @@ def test_delete_worker_pool( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_worker_pool), "__call__" + type(client.transport.delete_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2719,18 +2900,20 @@ def test_delete_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_delete_worker_pool_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.DeleteWorkerPoolRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.DeleteWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_worker_pool), "__call__" + type(client.transport.delete_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2741,12 +2924,17 @@ async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_worker_pool_async_from_dict(): + await test_delete_worker_pool_async(request_type=dict) + + def test_update_worker_pool( transport: str = "grpc", request_type=cloudbuild.UpdateWorkerPoolRequest ): @@ -2760,7 +2948,7 @@ def test_update_worker_pool( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_worker_pool), "__call__" + type(client.transport.update_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( @@ -2781,6 +2969,7 @@ def test_update_worker_pool( assert args[0] == cloudbuild.UpdateWorkerPoolRequest() # Establish that the response is the type that we expect. 
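[Editor's note] DeleteWorkerPool is a void RPC (google.protobuf.Empty on the wire), which the generated client surfaces as None; the tests therefore stub the call with None, or FakeUnaryUnaryCall(None) on the async side, and assert the response is None. Sketch, with an illustrative sketch_* name:

from unittest import mock

from google.auth import credentials
from google.cloud.devtools import cloudbuild_v1


def sketch_delete_worker_pool():
    client = cloudbuild_v1.CloudBuildClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.delete_worker_pool), "__call__") as call:
        call.return_value = None  # void RPC: nothing useful comes back
        response = client.delete_worker_pool(request={})
        assert response is None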
+ assert isinstance(response, cloudbuild.WorkerPool) assert response.name == "name_value" @@ -2801,18 +2990,20 @@ def test_update_worker_pool_from_dict(): @pytest.mark.asyncio -async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_update_worker_pool_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.UpdateWorkerPoolRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.UpdateWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_worker_pool), "__call__" + type(client.transport.update_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2832,7 +3023,7 @@ async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) @@ -2850,6 +3041,11 @@ async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +@pytest.mark.asyncio +async def test_update_worker_pool_async_from_dict(): + await test_update_worker_pool_async(request_type=dict) + + def test_list_worker_pools( transport: str = "grpc", request_type=cloudbuild.ListWorkerPoolsRequest ): @@ -2863,7 +3059,7 @@ def test_list_worker_pools( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_worker_pools), "__call__" + type(client.transport.list_worker_pools), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListWorkerPoolsResponse() @@ -2877,6 +3073,7 @@ def test_list_worker_pools( assert args[0] == cloudbuild.ListWorkerPoolsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.ListWorkerPoolsResponse) @@ -2885,18 +3082,20 @@ def test_list_worker_pools_from_dict(): @pytest.mark.asyncio -async def test_list_worker_pools_async(transport: str = "grpc_asyncio"): +async def test_list_worker_pools_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.ListWorkerPoolsRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListWorkerPoolsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_worker_pools), "__call__" + type(client.transport.list_worker_pools), "__call__" ) as call: # Designate an appropriate return value for the call. 
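[Editor's note] Each of these rewritten tests pins down the same invariant: whatever the caller passed in, the message handed to the transport equals a default-constructed request of the right type (hence args[0] == cloudbuild.ListWorkerPoolsRequest() rather than == request, since a dict input is no longer the same object). That assertion in isolation, with an illustrative sketch_* name:

from unittest import mock

from google.auth import credentials
from google.cloud.devtools import cloudbuild_v1
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild


def sketch_request_normalization():
    client = cloudbuild_v1.CloudBuildClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.list_worker_pools), "__call__") as call:
        call.return_value = cloudbuild.ListWorkerPoolsResponse()
        client.list_worker_pools(request={})  # dict input
        _, args, _ = call.mock_calls[0]
        # The dict was coerced into the canonical request message.
        assert args[0] == cloudbuild.ListWorkerPoolsRequest()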
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2909,12 +3108,17 @@ async def test_list_worker_pools_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListWorkerPoolsRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.ListWorkerPoolsResponse) +@pytest.mark.asyncio +async def test_list_worker_pools_async_from_dict(): + await test_list_worker_pools_async(request_type=dict) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudBuildGrpcTransport( @@ -2951,7 +3155,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = CloudBuildClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -2969,10 +3173,22 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.CloudBuildGrpcTransport,) + assert isinstance(client.transport, transports.CloudBuildGrpcTransport,) def test_cloud_build_base_transport_error(): @@ -3043,6 +3259,17 @@ def test_cloud_build_base_transport_with_credentials_file(): ) +def test_cloud_build_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudBuildTransport() + adc.assert_called_once() + + def test_cloud_build_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, "default") as adc: @@ -3075,7 +3302,7 @@ def test_cloud_build_host_no_port(): api_endpoint="cloudbuild.googleapis.com" ), ) - assert client._transport._host == "cloudbuild.googleapis.com:443" + assert client.transport._host == "cloudbuild.googleapis.com:443" def test_cloud_build_host_with_port(): @@ -3085,192 +3312,118 @@ def test_cloud_build_host_with_port(): api_endpoint="cloudbuild.googleapis.com:8000" ), ) - assert client._transport._host == "cloudbuild.googleapis.com:8000" + assert client.transport._host == "cloudbuild.googleapis.com:8000" def test_cloud_build_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. 
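[Editor's note] test_transport_adc and test_cloud_build_base_transport_with_adc both lean on the same trick: patch google.auth.default so that constructing a transport with no explicit credentials provably falls back to application default credentials. A minimal sketch mirroring those tests:

from unittest import mock

from google import auth
from google.auth import credentials
from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports


def sketch_transport_uses_adc():
    with mock.patch.object(auth, "default") as adc:
        adc.return_value = (credentials.AnonymousCredentials(), None)
        transports.CloudBuildGrpcTransport()  # no credentials supplied
        adc.assert_called_once()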
transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None def test_cloud_build_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.CloudBuildGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_cloud_build_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. - mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
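[Editor's note] With the mTLS plumbing moved out of these channel tests, passing a ready-made channel is now the whole story: the transport adopts it verbatim and records no SSL channel credentials. Sketch, reusing the host value from the tests above; the channel target is a dummy and no connection is made:

import grpc

from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports


def sketch_transport_with_explicit_channel():
    channel = grpc.insecure_channel("localhost:8080")
    transport = transports.CloudBuildGrpcTransport(host="squid.clam.whelk", channel=channel)
    assert transport.grpc_channel is channel
    assert transport._host == "squid.clam.whelk:443"  # default port appended
    assert transport._ssl_channel_credentials is None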
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.CloudBuildGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_cloud_build_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel +def test_cloud_build_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert 
transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. +def test_cloud_build_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - mock_cred = mock.Mock() - transport = transports.CloudBuildGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel def test_cloud_build_grpc_lro_client(): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport="grpc", ) - transport = client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsClient,) @@ -3283,10 +3436,199 @@ def test_cloud_build_grpc_lro_async_client(): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) - transport = client._client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. 
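[Editor's note] The api_mtls_endpoint/client_cert_source path still works but is deprecated, so the consolidated tests above build the transport inside pytest.warns(DeprecationWarning) with channel creation and ADC mocked out. A condensed sketch under the same assumptions (the certificate bytes are dummies, as in the tests; the sketch_* name is illustrative):

from unittest import mock

import pytest

from google import auth
from google.auth import credentials
from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports


def sketch_deprecated_mtls_kwargs():
    with mock.patch("grpc.ssl_channel_credentials", autospec=True):
        with mock.patch.object(transports.CloudBuildGrpcTransport, "create_channel", autospec=True):
            with mock.patch.object(auth, "default") as adc:
                adc.return_value = (credentials.AnonymousCredentials(), None)
                with pytest.warns(DeprecationWarning):
                    transports.CloudBuildGrpcTransport(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=lambda: (b"cert bytes", b"key bytes"),
                    )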
assert transport.operations_client is transport.operations_client + + +def test_build_path(): + project = "squid" + build = "clam" + + expected = "projects/{project}/builds/{build}".format(project=project, build=build,) + actual = CloudBuildClient.build_path(project, build) + assert expected == actual + + +def test_parse_build_path(): + expected = { + "project": "whelk", + "build": "octopus", + } + path = CloudBuildClient.build_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_build_path(path) + assert expected == actual + + +def test_build_trigger_path(): + project = "oyster" + trigger = "nudibranch" + + expected = "projects/{project}/triggers/{trigger}".format( + project=project, trigger=trigger, + ) + actual = CloudBuildClient.build_trigger_path(project, trigger) + assert expected == actual + + +def test_parse_build_trigger_path(): + expected = { + "project": "cuttlefish", + "trigger": "mussel", + } + path = CloudBuildClient.build_trigger_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_build_trigger_path(path) + assert expected == actual + + +def test_service_account_path(): + project = "winkle" + service_account = "nautilus" + + expected = "projects/{project}/serviceAccounts/{service_account}".format( + project=project, service_account=service_account, + ) + actual = CloudBuildClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "scallop", + "service_account": "abalone", + } + path = CloudBuildClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_service_account_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudBuildClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = CloudBuildClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + + expected = "folders/{folder}".format(folder=folder,) + actual = CloudBuildClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = CloudBuildClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + + expected = "organizations/{organization}".format(organization=organization,) + actual = CloudBuildClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = CloudBuildClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
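[Editor's note] Every path helper gets the same two-sided check: build the resource name from its components, then parse it back and compare dicts, which guards both the template string and the regex behind parse_*. One round trip as a sketch, with an illustrative sketch_* name:

from google.cloud.devtools import cloudbuild_v1


def sketch_build_path_round_trip():
    path = cloudbuild_v1.CloudBuildClient.build_path(project="squid", build="clam")
    assert path == "projects/squid/builds/clam"
    assert cloudbuild_v1.CloudBuildClient.parse_build_path(path) == {
        "project": "squid",
        "build": "clam",
    }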
+ actual = CloudBuildClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + + expected = "projects/{project}".format(project=project,) + actual = CloudBuildClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = CloudBuildClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = CloudBuildClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = CloudBuildClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudBuildTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudBuildClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudBuildTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudBuildClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info)
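[Editor's note] The final test pins the client_info plumbing: whether the transport is built by the client or by hand, the ClientInfo instance must reach _prep_wrapped_messages, which is where the x-goog-api-client user-agent gets attached to each wrapped RPC. A sketch of the first half, mirroring the test above with an illustrative sketch_* name:

from unittest import mock

from google.api_core import gapic_v1
from google.auth import credentials
from google.cloud.devtools import cloudbuild_v1
from google.cloud.devtools.cloudbuild_v1.services.cloud_build import transports


def sketch_client_info_reaches_prep():
    client_info = gapic_v1.client_info.ClientInfo()
    with mock.patch.object(transports.CloudBuildTransport, "_prep_wrapped_messages") as prep:
        cloudbuild_v1.CloudBuildClient(
            credentials=credentials.AnonymousCredentials(), client_info=client_info
        )
        prep.assert_called_once_with(client_info)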