diff --git a/docs/bigquery_datatransfer_v1/data_transfer_service.rst b/docs/bigquery_datatransfer_v1/data_transfer_service.rst index 58f85396..480f43ed 100644 --- a/docs/bigquery_datatransfer_v1/data_transfer_service.rst +++ b/docs/bigquery_datatransfer_v1/data_transfer_service.rst @@ -5,7 +5,6 @@ DataTransferService :members: :inherited-members: - .. automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers :members: :inherited-members: diff --git a/google/cloud/bigquery_datatransfer/__init__.py b/google/cloud/bigquery_datatransfer/__init__.py index 721bb151..bcde8ce8 100644 --- a/google/cloud/bigquery_datatransfer/__init__.py +++ b/google/cloud/bigquery_datatransfer/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,12 +14,13 @@ # limitations under the License. # -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.async_client import ( - DataTransferServiceAsyncClient, -) from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.client import ( DataTransferServiceClient, ) +from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.async_client import ( + DataTransferServiceAsyncClient, +) + from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ( CheckValidCredsRequest, ) @@ -95,16 +95,15 @@ from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferType __all__ = ( + "DataTransferServiceClient", + "DataTransferServiceAsyncClient", "CheckValidCredsRequest", "CheckValidCredsResponse", "CreateTransferConfigRequest", "DataSource", "DataSourceParameter", - "DataTransferServiceAsyncClient", - "DataTransferServiceClient", "DeleteTransferConfigRequest", "DeleteTransferRunRequest", - "EmailPreferences", "GetDataSourceRequest", "GetTransferConfigRequest", "GetTransferRunRequest", @@ -116,15 +115,16 @@ "ListTransferLogsResponse", "ListTransferRunsRequest", "ListTransferRunsResponse", - "ScheduleOptions", "ScheduleTransferRunsRequest", "ScheduleTransferRunsResponse", "StartManualTransferRunsRequest", "StartManualTransferRunsResponse", + "UpdateTransferConfigRequest", + "EmailPreferences", + "ScheduleOptions", "TransferConfig", "TransferMessage", "TransferRun", "TransferState", "TransferType", - "UpdateTransferConfigRequest", ) diff --git a/google/cloud/bigquery_datatransfer_v1/__init__.py b/google/cloud/bigquery_datatransfer_v1/__init__.py index 258e1f36..1c6f0fc8 100644 --- a/google/cloud/bigquery_datatransfer_v1/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,6 +16,7 @@ from .services.data_transfer_service import DataTransferServiceClient from .services.data_transfer_service import DataTransferServiceAsyncClient + from .types.datatransfer import CheckValidCredsRequest from .types.datatransfer import CheckValidCredsResponse from .types.datatransfer import CreateTransferConfigRequest @@ -48,13 +48,14 @@ from .types.transfer import TransferState from .types.transfer import TransferType - __all__ = ( + "DataTransferServiceAsyncClient", "CheckValidCredsRequest", "CheckValidCredsResponse", "CreateTransferConfigRequest", "DataSource", "DataSourceParameter", + "DataTransferServiceClient", "DeleteTransferConfigRequest", "DeleteTransferRunRequest", "EmailPreferences", @@ -80,6 +81,4 @@ "TransferState", "TransferType", 
"UpdateTransferConfigRequest", - "DataTransferServiceClient", - "DataTransferServiceAsyncClient", ) diff --git a/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json b/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json new file mode 100644 index 00000000..75ee9340 --- /dev/null +++ b/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json @@ -0,0 +1,163 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery_datatransfer_v1", + "protoPackage": "google.cloud.bigquery.datatransfer.v1", + "schema": "1.0", + "services": { + "DataTransferService": { + "clients": { + "grpc": { + "libraryClient": "DataTransferServiceClient", + "rpcs": { + "CheckValidCreds": { + "methods": [ + "check_valid_creds" + ] + }, + "CreateTransferConfig": { + "methods": [ + "create_transfer_config" + ] + }, + "DeleteTransferConfig": { + "methods": [ + "delete_transfer_config" + ] + }, + "DeleteTransferRun": { + "methods": [ + "delete_transfer_run" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "GetTransferConfig": { + "methods": [ + "get_transfer_config" + ] + }, + "GetTransferRun": { + "methods": [ + "get_transfer_run" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "ListTransferConfigs": { + "methods": [ + "list_transfer_configs" + ] + }, + "ListTransferLogs": { + "methods": [ + "list_transfer_logs" + ] + }, + "ListTransferRuns": { + "methods": [ + "list_transfer_runs" + ] + }, + "ScheduleTransferRuns": { + "methods": [ + "schedule_transfer_runs" + ] + }, + "StartManualTransferRuns": { + "methods": [ + "start_manual_transfer_runs" + ] + }, + "UpdateTransferConfig": { + "methods": [ + "update_transfer_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataTransferServiceAsyncClient", + "rpcs": { + "CheckValidCreds": { + "methods": [ + "check_valid_creds" + ] + }, + "CreateTransferConfig": { + "methods": [ + "create_transfer_config" + ] + }, + "DeleteTransferConfig": { + "methods": [ + "delete_transfer_config" + ] + }, + "DeleteTransferRun": { + "methods": [ + "delete_transfer_run" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "GetTransferConfig": { + "methods": [ + "get_transfer_config" + ] + }, + "GetTransferRun": { + "methods": [ + "get_transfer_run" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "ListTransferConfigs": { + "methods": [ + "list_transfer_configs" + ] + }, + "ListTransferLogs": { + "methods": [ + "list_transfer_logs" + ] + }, + "ListTransferRuns": { + "methods": [ + "list_transfer_runs" + ] + }, + "ScheduleTransferRuns": { + "methods": [ + "schedule_transfer_runs" + ] + }, + "StartManualTransferRuns": { + "methods": [ + "start_manual_transfer_runs" + ] + }, + "UpdateTransferConfig": { + "methods": [ + "update_transfer_config" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/bigquery_datatransfer_v1/services/__init__.py b/google/cloud/bigquery_datatransfer_v1/services/__init__.py index 42ffdf2b..4de65971 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py 
b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py index b64f150a..392ecf49 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import DataTransferServiceClient from .async_client import DataTransferServiceAsyncClient diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index 47ac9f6d..d0454623 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,21 +20,20 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers from google.cloud.bigquery_datatransfer_v1.types import datatransfer from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore - +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from .transports.base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport from .client import DataTransferServiceClient @@ -64,31 +61,26 @@ class DataTransferServiceAsyncClient: parse_transfer_config_path = staticmethod( DataTransferServiceClient.parse_transfer_config_path ) - common_billing_account_path = staticmethod( DataTransferServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( DataTransferServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(DataTransferServiceClient.common_folder_path) parse_common_folder_path = staticmethod( DataTransferServiceClient.parse_common_folder_path ) - common_organization_path = staticmethod( 
DataTransferServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( DataTransferServiceClient.parse_common_organization_path ) - common_project_path = staticmethod(DataTransferServiceClient.common_project_path) parse_common_project_path = staticmethod( DataTransferServiceClient.parse_common_project_path ) - common_location_path = staticmethod(DataTransferServiceClient.common_location_path) parse_common_location_path = staticmethod( DataTransferServiceClient.parse_common_location_path @@ -96,7 +88,8 @@ class DataTransferServiceAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -111,7 +104,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -128,7 +121,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataTransferServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: DataTransferServiceTransport: The transport used by the client instance. @@ -143,12 +136,12 @@ def transport(self) -> DataTransferServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, DataTransferServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the data transfer service client. + """Instantiates the data transfer service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -180,7 +173,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = DataTransferServiceClient( credentials=credentials, transport=transport, @@ -213,7 +205,6 @@ async def get_data_source( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -241,7 +232,6 @@ async def get_data_source( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -254,7 +244,8 @@ async def get_data_source( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -299,7 +290,6 @@ async def list_data_sources( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -329,7 +319,6 @@ async def list_data_sources( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -342,7 +331,8 @@ async def list_data_sources( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -408,7 +398,6 @@ async def create_transfer_config( This corresponds to the ``transfer_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -441,7 +430,6 @@ async def create_transfer_config( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if transfer_config is not None: @@ -472,7 +460,7 @@ async def update_transfer_config( request: datatransfer.UpdateTransferConfigRequest = None, *, transfer_config: transfer.TransferConfig = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -500,7 +488,6 @@ async def update_transfer_config( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -533,7 +520,6 @@ async def update_transfer_config( # If we have keyword arguments corresponding to fields on the # request, apply these. - if transfer_config is not None: request.transfer_config = transfer_config if update_mask is not None: @@ -587,7 +573,6 @@ async def delete_transfer_config( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -608,7 +593,6 @@ async def delete_transfer_config( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -621,7 +605,8 @@ async def delete_transfer_config( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -664,7 +649,6 @@ async def get_transfer_config( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -697,7 +681,6 @@ async def get_transfer_config( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -710,7 +693,8 @@ async def get_transfer_config( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -754,7 +738,6 @@ async def list_transfer_configs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -784,7 +767,6 @@ async def list_transfer_configs( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -797,7 +779,8 @@ async def list_transfer_configs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -828,8 +811,8 @@ async def schedule_transfer_runs( request: datatransfer.ScheduleTransferRunsRequest = None, *, parent: str = None, - start_time: timestamp.Timestamp = None, - end_time: timestamp.Timestamp = None, + start_time: timestamp_pb2.Timestamp = None, + end_time: timestamp_pb2.Timestamp = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -866,7 +849,6 @@ async def schedule_transfer_runs( This corresponds to the ``end_time`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -893,7 +875,6 @@ async def schedule_transfer_runs( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if start_time is not None: @@ -938,7 +919,6 @@ async def start_manual_transfer_runs( request (:class:`google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest`): The request object. A request to start manual transfer runs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -952,7 +932,6 @@ async def start_manual_transfer_runs( """ # Create or coerce a protobuf request object. - request = datatransfer.StartManualTransferRunsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1001,7 +980,6 @@ async def get_transfer_run( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1026,7 +1004,6 @@ async def get_transfer_run( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1039,7 +1016,8 @@ async def get_transfer_run( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -1084,7 +1062,6 @@ async def delete_transfer_run( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1105,7 +1082,6 @@ async def delete_transfer_run( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name @@ -1118,7 +1094,8 @@ async def delete_transfer_run( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -1164,7 +1141,6 @@ async def list_transfer_runs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1194,7 +1170,6 @@ async def list_transfer_runs( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1207,7 +1182,8 @@ async def list_transfer_runs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -1258,7 +1234,6 @@ async def list_transfer_logs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1288,7 +1263,6 @@ async def list_transfer_logs( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1301,7 +1275,8 @@ async def list_transfer_logs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -1363,7 +1338,6 @@ async def check_valid_creds( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1390,7 +1364,6 @@ async def check_valid_creds( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1403,7 +1376,8 @@ async def check_valid_creds( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index 4aac13d8..b0dab00f 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -35,12 +33,11 @@ from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers from google.cloud.bigquery_datatransfer_v1.types import datatransfer from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore - +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from .transports.base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DataTransferServiceGrpcTransport from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport @@ -63,7 +60,7 @@ class DataTransferServiceClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[DataTransferServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -90,7 +87,8 @@ class DataTransferServiceClient(metaclass=DataTransferServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -124,7 +122,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -141,7 +140,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -160,23 +159,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> DataTransferServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - DataTransferServiceTransport: The transport used by the client instance. 
+ DataTransferServiceTransport: The transport used by the client + instance. """ return self._transport @staticmethod def data_source_path(project: str, data_source: str,) -> str: - """Return a fully-qualified data_source string.""" + """Returns a fully-qualified data_source string.""" return "projects/{project}/dataSources/{data_source}".format( project=project, data_source=data_source, ) @staticmethod def parse_data_source_path(path: str) -> Dict[str, str]: - """Parse a data_source path into its component segments.""" + """Parses a data_source path into its component segments.""" m = re.match( r"^projects/(?P<project>.+?)/dataSources/(?P<data_source>.+?)$", path ) @@ -184,14 +184,14 @@ def parse_data_source_path(path: str) -> Dict[str, str]: @staticmethod def run_path(project: str, transfer_config: str, run: str,) -> str: - """Return a fully-qualified run string.""" + """Returns a fully-qualified run string.""" return "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format( project=project, transfer_config=transfer_config, run=run, ) @staticmethod def parse_run_path(path: str) -> Dict[str, str]: - """Parse a run path into its component segments.""" + """Parses a run path into its component segments.""" m = re.match( r"^projects/(?P<project>.+?)/transferConfigs/(?P<transfer_config>.+?)/runs/(?P<run>.+?)$", path, ) @@ -200,14 +200,14 @@ def parse_run_path(path: str) -> Dict[str, str]: @staticmethod def transfer_config_path(project: str, transfer_config: str,) -> str: - """Return a fully-qualified transfer_config string.""" + """Returns a fully-qualified transfer_config string.""" return "projects/{project}/transferConfigs/{transfer_config}".format( project=project, transfer_config=transfer_config, ) @staticmethod def parse_transfer_config_path(path: str) -> Dict[str, str]: - """Parse a transfer_config path into its component segments.""" + """Parses a transfer_config path into its component segments.""" m = re.match( r"^projects/(?P<project>.+?)/transferConfigs/(?P<transfer_config>.+?)$", path, ) @@ -216,7 +216,7 @@ def parse_transfer_config_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -229,7 +229,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -240,7 +240,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -251,7 +251,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -262,7 +262,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return 
"projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -276,12 +276,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DataTransferServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the data transfer service client. + """Instantiates the data transfer service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -336,9 +336,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -350,12 +351,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -370,8 +373,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -411,7 +414,6 @@ def get_data_source( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -441,10 +443,8 @@ def get_data_source( # there are no flattened fields. if not isinstance(request, datatransfer.GetDataSourceRequest): request = datatransfer.GetDataSourceRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -489,7 +489,6 @@ def list_data_sources( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -521,10 +520,8 @@ def list_data_sources( # there are no flattened fields. if not isinstance(request, datatransfer.ListDataSourcesRequest): request = datatransfer.ListDataSourcesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -590,7 +587,6 @@ def create_transfer_config( This corresponds to the ``transfer_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -625,10 +621,8 @@ def create_transfer_config( # there are no flattened fields. if not isinstance(request, datatransfer.CreateTransferConfigRequest): request = datatransfer.CreateTransferConfigRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if transfer_config is not None: @@ -655,7 +649,7 @@ def update_transfer_config( request: datatransfer.UpdateTransferConfigRequest = None, *, transfer_config: transfer.TransferConfig = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -683,7 +677,6 @@ def update_transfer_config( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -718,10 +711,8 @@ def update_transfer_config( # there are no flattened fields. if not isinstance(request, datatransfer.UpdateTransferConfigRequest): request = datatransfer.UpdateTransferConfigRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if transfer_config is not None: request.transfer_config = transfer_config if update_mask is not None: @@ -771,7 +762,6 @@ def delete_transfer_config( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -794,10 +784,8 @@ def delete_transfer_config( # there are no flattened fields. if not isinstance(request, datatransfer.DeleteTransferConfigRequest): request = datatransfer.DeleteTransferConfigRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -840,7 +828,6 @@ def get_transfer_config( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -875,10 +862,8 @@ def get_transfer_config( # there are no flattened fields. if not isinstance(request, datatransfer.GetTransferConfigRequest): request = datatransfer.GetTransferConfigRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -922,7 +907,6 @@ def list_transfer_configs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -954,10 +938,8 @@ def list_transfer_configs( # there are no flattened fields. if not isinstance(request, datatransfer.ListTransferConfigsRequest): request = datatransfer.ListTransferConfigsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -988,8 +970,8 @@ def schedule_transfer_runs( request: datatransfer.ScheduleTransferRunsRequest = None, *, parent: str = None, - start_time: timestamp.Timestamp = None, - end_time: timestamp.Timestamp = None, + start_time: timestamp_pb2.Timestamp = None, + end_time: timestamp_pb2.Timestamp = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1026,7 +1008,6 @@ def schedule_transfer_runs( This corresponds to the ``end_time`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1055,10 +1036,8 @@ def schedule_transfer_runs( # there are no flattened fields. if not isinstance(request, datatransfer.ScheduleTransferRunsRequest): request = datatransfer.ScheduleTransferRunsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if start_time is not None: @@ -1099,7 +1078,6 @@ def start_manual_transfer_runs( request (google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest): The request object. A request to start manual transfer runs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1113,7 +1091,6 @@ def start_manual_transfer_runs( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a datatransfer.StartManualTransferRunsRequest. # There's no risk of modifying the input as we've already verified @@ -1165,7 +1142,6 @@ def get_transfer_run( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1192,10 +1168,8 @@ def get_transfer_run( # there are no flattened fields. if not isinstance(request, datatransfer.GetTransferRunRequest): request = datatransfer.GetTransferRunRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1240,7 +1214,6 @@ def delete_transfer_run( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1263,10 +1236,8 @@ def delete_transfer_run( # there are no flattened fields. if not isinstance(request, datatransfer.DeleteTransferRunRequest): request = datatransfer.DeleteTransferRunRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1312,7 +1283,6 @@ def list_transfer_runs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1344,10 +1314,8 @@ def list_transfer_runs( # there are no flattened fields. 
if not isinstance(request, datatransfer.ListTransferRunsRequest): request = datatransfer.ListTransferRunsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1398,7 +1366,6 @@ def list_transfer_logs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1430,10 +1397,8 @@ def list_transfer_logs( # there are no flattened fields. if not isinstance(request, datatransfer.ListTransferLogsRequest): request = datatransfer.ListTransferLogsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1495,7 +1460,6 @@ def check_valid_creds( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1524,10 +1488,8 @@ def check_valid_creds( # there are no flattened fields. if not isinstance(request, datatransfer.CheckValidCredsRequest): request = datatransfer.CheckValidCredsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py index 425e8eed..5c238be5 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -118,7 +116,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -246,7 +244,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -374,7 +372,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -502,7 +500,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py index 097e5854..cf114cd1 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py index 104b9bff..830861c5 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.bigquery_datatransfer_v1.types import datatransfer from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -39,27 +38,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class DataTransferServiceTransport(abc.ABC): """Abstract transport class for DataTransferService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "bigquerydatatransfer.googleapis.com" + def __init__( self, *, - host: str = "bigquerydatatransfer.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + 
credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -68,7 +81,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -82,29 +95,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -115,7 +175,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -129,7 +190,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -153,7 +215,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -167,7 +230,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -181,7 +245,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -205,7 +270,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -219,7 +285,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), 
deadline=20.0, ), @@ -233,7 +300,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -247,7 +315,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -261,7 +330,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=20.0, ), @@ -273,22 +343,20 @@ def _prep_wrapped_messages(self, client_info): @property def get_data_source( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.GetDataSourceRequest], - typing.Union[ - datatransfer.DataSource, typing.Awaitable[datatransfer.DataSource] - ], + Union[datatransfer.DataSource, Awaitable[datatransfer.DataSource]], ]: raise NotImplementedError() @property def list_data_sources( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.ListDataSourcesRequest], - typing.Union[ + Union[ datatransfer.ListDataSourcesResponse, - typing.Awaitable[datatransfer.ListDataSourcesResponse], + Awaitable[datatransfer.ListDataSourcesResponse], ], ]: raise NotImplementedError() @@ -296,53 +364,47 @@ def list_data_sources( @property def create_transfer_config( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.CreateTransferConfigRequest], - typing.Union[ - transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig] - ], + Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]], ]: raise NotImplementedError() @property def update_transfer_config( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.UpdateTransferConfigRequest], - typing.Union[ - transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig] - ], + Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]], ]: raise NotImplementedError() @property def delete_transfer_config( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.DeleteTransferConfigRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def get_transfer_config( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.GetTransferConfigRequest], - typing.Union[ - transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig] - ], + Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]], ]: raise NotImplementedError() @property def list_transfer_configs( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.ListTransferConfigsRequest], - typing.Union[ + Union[ datatransfer.ListTransferConfigsResponse, - typing.Awaitable[datatransfer.ListTransferConfigsResponse], + Awaitable[datatransfer.ListTransferConfigsResponse], ], ]: raise NotImplementedError() @@ -350,11 +412,11 @@ def list_transfer_configs( @property def schedule_transfer_runs( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.ScheduleTransferRunsRequest], - typing.Union[ + Union[ datatransfer.ScheduleTransferRunsResponse, - typing.Awaitable[datatransfer.ScheduleTransferRunsResponse], + 
Awaitable[datatransfer.ScheduleTransferRunsResponse], ], ]: raise NotImplementedError() @@ -362,11 +424,11 @@ def schedule_transfer_runs( @property def start_manual_transfer_runs( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.StartManualTransferRunsRequest], - typing.Union[ + Union[ datatransfer.StartManualTransferRunsResponse, - typing.Awaitable[datatransfer.StartManualTransferRunsResponse], + Awaitable[datatransfer.StartManualTransferRunsResponse], ], ]: raise NotImplementedError() @@ -374,29 +436,29 @@ def start_manual_transfer_runs( @property def get_transfer_run( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.GetTransferRunRequest], - typing.Union[transfer.TransferRun, typing.Awaitable[transfer.TransferRun]], + Union[transfer.TransferRun, Awaitable[transfer.TransferRun]], ]: raise NotImplementedError() @property def delete_transfer_run( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.DeleteTransferRunRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_transfer_runs( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.ListTransferRunsRequest], - typing.Union[ + Union[ datatransfer.ListTransferRunsResponse, - typing.Awaitable[datatransfer.ListTransferRunsResponse], + Awaitable[datatransfer.ListTransferRunsResponse], ], ]: raise NotImplementedError() @@ -404,11 +466,11 @@ def list_transfer_runs( @property def list_transfer_logs( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.ListTransferLogsRequest], - typing.Union[ + Union[ datatransfer.ListTransferLogsResponse, - typing.Awaitable[datatransfer.ListTransferLogsResponse], + Awaitable[datatransfer.ListTransferLogsResponse], ], ]: raise NotImplementedError() @@ -416,11 +478,11 @@ def list_transfer_logs( @property def check_valid_creds( self, - ) -> typing.Callable[ + ) -> Callable[ [datatransfer.CheckValidCredsRequest], - typing.Union[ + Union[ datatransfer.CheckValidCredsResponse, - typing.Awaitable[datatransfer.CheckValidCredsResponse], + Awaitable[datatransfer.CheckValidCredsResponse], ], ]: raise NotImplementedError() diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py index c6c39bb4..9181ae48 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
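A note on the typing churn in the transport base class above: the rewrite is mechanical (names imported directly instead of through the typing namespace), but the shape of these properties is worth spelling out. Each abstract property returns a callable whose result is a plain response in the sync gRPC transport and an awaitable in the asyncio one, which is why the annotation is a Union over both. A minimal, self-contained sketch of the pattern — the Request/Response placeholder types here are invented, not part of the library:

from typing import Awaitable, Callable, Union


class FakeRequest:
    pass


class FakeResponse:
    pass


class TransportSketch:
    @property
    def get_thing(
        self,
    ) -> Callable[[FakeRequest], Union[FakeResponse, Awaitable[FakeResponse]]]:
        # The base class only pins down the signature; concrete transports
        # override this property with a real (possibly async) stub.
        raise NotImplementedError()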
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.bigquery_datatransfer_v1.types import datatransfer from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO @@ -55,7 +52,7 @@ def __init__( self, *, host: str = "bigquerydatatransfer.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -69,7 +66,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -179,7 +177,7 @@ def __init__( def create_channel( cls, host: str = "bigquerydatatransfer.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,13 +208,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -338,7 +338,7 @@ def update_transfer_config( @property def delete_transfer_config( self, - ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty.Empty]: + ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty_pb2.Empty]: r"""Return a callable for the delete transfer config method over gRPC. Deletes a data transfer configuration, @@ -358,7 +358,7 @@ def delete_transfer_config( self._stubs["delete_transfer_config"] = self.grpc_channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", request_serializer=datatransfer.DeleteTransferConfigRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_transfer_config"] @@ -513,7 +513,7 @@ def get_transfer_run( @property def delete_transfer_run( self, - ) -> Callable[[datatransfer.DeleteTransferRunRequest], empty.Empty]: + ) -> Callable[[datatransfer.DeleteTransferRunRequest], empty_pb2.Empty]: r"""Return a callable for the delete transfer run method over gRPC. Deletes the specified transfer run. 
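For context on the create_channel change above: instead of hard-coding scopes, the transport now forwards kwargs computed by the TODO-marked helper added earlier in base.py, which gates on the installed google-api-core version. A condensed sketch of that dispatch — the scope and version values below are illustrative; the real helper reads _API_CORE_VERSION at import time and uses the class's AUTH_SCOPES and DEFAULT_HOST:

import packaging.version

AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)  # illustrative
DEFAULT_HOST = "bigquerydatatransfer.googleapis.com"


def self_signed_jwt_kwargs_sketch(scopes, api_core_version="1.26.0"):
    # google-api-core >= 1.26.0 accepts default_scopes/default_host, which
    # enable self-signed JWT auth; older versions only understand scopes.
    if api_core_version and packaging.version.parse(
        api_core_version
    ) >= packaging.version.parse("1.26.0"):
        return {
            "default_scopes": AUTH_SCOPES,
            "scopes": scopes,
            "default_host": DEFAULT_HOST,
        }
    return {"scopes": scopes or AUTH_SCOPES}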
@@ -532,7 +532,7 @@ def delete_transfer_run( self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", request_serializer=datatransfer.DeleteTransferRunRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_transfer_run"] diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py index 00cb6fb6..2cd986db 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.bigquery_datatransfer_v1.types import datatransfer from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO from .grpc import DataTransferServiceGrpcTransport @@ -58,7 +55,7 @@ class DataTransferServiceGrpcAsyncIOTransport(DataTransferServiceTransport): def create_channel( cls, host: str = "bigquerydatatransfer.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -85,13 +82,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -99,7 +98,7 @@ def __init__( self, *, host: str = "bigquerydatatransfer.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -113,7 +112,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -171,7 +171,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -349,7 +348,9 @@ def update_transfer_config( @property def delete_transfer_config( self, - ) -> Callable[[datatransfer.DeleteTransferConfigRequest], Awaitable[empty.Empty]]: + ) -> Callable[ + [datatransfer.DeleteTransferConfigRequest], Awaitable[empty_pb2.Empty] + ]: r"""Return a callable for the delete transfer config method over gRPC. Deletes a data transfer configuration, @@ -369,7 +370,7 @@ def delete_transfer_config( self._stubs["delete_transfer_config"] = self.grpc_channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", request_serializer=datatransfer.DeleteTransferConfigRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_transfer_config"] @@ -528,7 +529,7 @@ def get_transfer_run( @property def delete_transfer_run( self, - ) -> Callable[[datatransfer.DeleteTransferRunRequest], Awaitable[empty.Empty]]: + ) -> Callable[[datatransfer.DeleteTransferRunRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete transfer run method over gRPC. Deletes the specified transfer run. @@ -547,7 +548,7 @@ def delete_transfer_run( self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary( "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", request_serializer=datatransfer.DeleteTransferRunRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_transfer_run"] diff --git a/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/google/cloud/bigquery_datatransfer_v1/types/__init__.py index b886143f..b79fc3ff 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .datatransfer import ( CheckValidCredsRequest, CheckValidCredsResponse, diff --git a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index 9b92f388..d9869c0d 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,15 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
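Worth keeping in mind while reading the delete_transfer_config/delete_transfer_run hunks in both transports: the stubs are created lazily and cached by method name, so the empty_pb2 rename only touches the deserializer wiring. A simplified sketch of that caching pattern, using the same RPC path and serializers the diff shows (the channel object is assumed to come from create_channel):

from google.cloud.bigquery_datatransfer_v1.types import datatransfer
from google.protobuf import empty_pb2  # type: ignore


class StubCacheSketch:
    def __init__(self, grpc_channel):
        self.grpc_channel = grpc_channel
        self._stubs = {}

    @property
    def delete_transfer_run(self):
        # Create the unary-unary stub on first access, then reuse it.
        if "delete_transfer_run" not in self._stubs:
            self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary(
                "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun",
                request_serializer=datatransfer.DeleteTransferRunRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs["delete_transfer_run"]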
# - import proto # type: ignore - from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.protobuf import wrappers_pb2 as wrappers # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore __protobuf__ = proto.module( @@ -116,39 +113,24 @@ class Type(proto.Enum): RECORD = 5 PLUS_PAGE = 6 - param_id = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + param_id = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) type_ = proto.Field(proto.ENUM, number=4, enum=Type,) - - required = proto.Field(proto.BOOL, number=5) - - repeated = proto.Field(proto.BOOL, number=6) - - validation_regex = proto.Field(proto.STRING, number=7) - - allowed_values = proto.RepeatedField(proto.STRING, number=8) - - min_value = proto.Field(proto.MESSAGE, number=9, message=wrappers.DoubleValue,) - - max_value = proto.Field(proto.MESSAGE, number=10, message=wrappers.DoubleValue,) - + required = proto.Field(proto.BOOL, number=5,) + repeated = proto.Field(proto.BOOL, number=6,) + validation_regex = proto.Field(proto.STRING, number=7,) + allowed_values = proto.RepeatedField(proto.STRING, number=8,) + min_value = proto.Field(proto.MESSAGE, number=9, message=wrappers_pb2.DoubleValue,) + max_value = proto.Field(proto.MESSAGE, number=10, message=wrappers_pb2.DoubleValue,) fields = proto.RepeatedField( proto.MESSAGE, number=11, message="DataSourceParameter", ) - - validation_description = proto.Field(proto.STRING, number=12) - - validation_help_url = proto.Field(proto.STRING, number=13) - - immutable = proto.Field(proto.BOOL, number=14) - - recurse = proto.Field(proto.BOOL, number=15) - - deprecated = proto.Field(proto.BOOL, number=20) + validation_description = proto.Field(proto.STRING, number=12,) + validation_help_url = proto.Field(proto.STRING, number=13,) + immutable = proto.Field(proto.BOOL, number=14,) + recurse = proto.Field(proto.BOOL, number=15,) + deprecated = proto.Field(proto.BOOL, number=20,) class DataSource(proto.Message): @@ -228,50 +210,32 @@ class DataRefreshType(proto.Enum): SLIDING_WINDOW = 1 CUSTOM_SLIDING_WINDOW = 2 - name = proto.Field(proto.STRING, number=1) - - data_source_id = proto.Field(proto.STRING, number=2) - - display_name = proto.Field(proto.STRING, number=3) - - description = proto.Field(proto.STRING, number=4) - - client_id = proto.Field(proto.STRING, number=5) - - scopes = proto.RepeatedField(proto.STRING, number=6) - + name = proto.Field(proto.STRING, number=1,) + data_source_id = proto.Field(proto.STRING, number=2,) + display_name = proto.Field(proto.STRING, number=3,) + description = proto.Field(proto.STRING, number=4,) + client_id = proto.Field(proto.STRING, number=5,) + scopes = proto.RepeatedField(proto.STRING, number=6,) transfer_type = proto.Field(proto.ENUM, number=7, enum=transfer.TransferType,) - - supports_multiple_transfers = proto.Field(proto.BOOL, number=8) - - update_deadline_seconds = proto.Field(proto.INT32, number=9) - - default_schedule = proto.Field(proto.STRING, 
number=10) - - supports_custom_schedule = proto.Field(proto.BOOL, number=11) - + supports_multiple_transfers = proto.Field(proto.BOOL, number=8,) + update_deadline_seconds = proto.Field(proto.INT32, number=9,) + default_schedule = proto.Field(proto.STRING, number=10,) + supports_custom_schedule = proto.Field(proto.BOOL, number=11,) parameters = proto.RepeatedField( proto.MESSAGE, number=12, message="DataSourceParameter", ) - - help_url = proto.Field(proto.STRING, number=13) - + help_url = proto.Field(proto.STRING, number=13,) authorization_type = proto.Field(proto.ENUM, number=14, enum=AuthorizationType,) - data_refresh_type = proto.Field(proto.ENUM, number=15, enum=DataRefreshType,) - - default_data_refresh_window_days = proto.Field(proto.INT32, number=16) - - manual_runs_disabled = proto.Field(proto.BOOL, number=17) - + default_data_refresh_window_days = proto.Field(proto.INT32, number=16,) + manual_runs_disabled = proto.Field(proto.BOOL, number=17,) minimum_schedule_interval = proto.Field( - proto.MESSAGE, number=18, message=duration.Duration, + proto.MESSAGE, number=18, message=duration_pb2.Duration, ) class GetDataSourceRequest(proto.Message): r"""A request to get data source info. - Attributes: name (str): Required. The field will contain name of the resource @@ -280,7 +244,7 @@ class GetDataSourceRequest(proto.Message): ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListDataSourcesRequest(proto.Message): @@ -304,16 +268,13 @@ class ListDataSourcesRequest(proto.Message): maximum value of 1000 results. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=3) - - page_size = proto.Field(proto.INT32, number=4) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=3,) + page_size = proto.Field(proto.INT32, number=4,) class ListDataSourcesResponse(proto.Message): r"""Returns list of supported data sources and their metadata. - Attributes: data_sources (Sequence[google.cloud.bigquery_datatransfer_v1.types.DataSource]): List of supported data sources and their @@ -330,8 +291,7 @@ def raw_page(self): return self data_sources = proto.RepeatedField(proto.MESSAGE, number=1, message="DataSource",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateTransferConfigRequest(proto.Message): @@ -389,17 +349,13 @@ class CreateTransferConfigRequest(proto.Message): permissions to act as this service account. 
""" - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) transfer_config = proto.Field( proto.MESSAGE, number=2, message=transfer.TransferConfig, ) - - authorization_code = proto.Field(proto.STRING, number=3) - - version_info = proto.Field(proto.STRING, number=5) - - service_account_name = proto.Field(proto.STRING, number=6) + authorization_code = proto.Field(proto.STRING, number=3,) + version_info = proto.Field(proto.STRING, number=5,) + service_account_name = proto.Field(proto.STRING, number=6,) class UpdateTransferConfigRequest(proto.Message): @@ -453,19 +409,16 @@ class UpdateTransferConfigRequest(proto.Message): transfer_config = proto.Field( proto.MESSAGE, number=1, message=transfer.TransferConfig, ) - - authorization_code = proto.Field(proto.STRING, number=3) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) - - version_info = proto.Field(proto.STRING, number=5) - - service_account_name = proto.Field(proto.STRING, number=6) + authorization_code = proto.Field(proto.STRING, number=3,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) + version_info = proto.Field(proto.STRING, number=5,) + service_account_name = proto.Field(proto.STRING, number=6,) class GetTransferConfigRequest(proto.Message): r"""A request to get data transfer information. - Attributes: name (str): Required. The field will contain name of the resource @@ -474,7 +427,7 @@ class GetTransferConfigRequest(proto.Message): ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteTransferConfigRequest(proto.Message): @@ -489,12 +442,11 @@ class DeleteTransferConfigRequest(proto.Message): ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class GetTransferRunRequest(proto.Message): r"""A request to get data transfer run information. - Attributes: name (str): Required. The field will contain name of the resource @@ -504,12 +456,11 @@ class GetTransferRunRequest(proto.Message): ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteTransferRunRequest(proto.Message): r"""A request to delete data transfer run information. - Attributes: name (str): Required. The field will contain name of the resource @@ -519,7 +470,7 @@ class DeleteTransferRunRequest(proto.Message): ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListTransferConfigsRequest(proto.Message): @@ -545,18 +496,14 @@ class ListTransferConfigsRequest(proto.Message): maximum value of 1000 results. 
""" - parent = proto.Field(proto.STRING, number=1) - - data_source_ids = proto.RepeatedField(proto.STRING, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - page_size = proto.Field(proto.INT32, number=4) + parent = proto.Field(proto.STRING, number=1,) + data_source_ids = proto.RepeatedField(proto.STRING, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + page_size = proto.Field(proto.INT32, number=4,) class ListTransferConfigsResponse(proto.Message): r"""The returned list of pipelines in the project. - Attributes: transfer_configs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Output only. The stored pipeline transfer @@ -575,8 +522,7 @@ def raw_page(self): transfer_configs = proto.RepeatedField( proto.MESSAGE, number=1, message=transfer.TransferConfig, ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListTransferRunsRequest(proto.Message): @@ -613,20 +559,15 @@ class RunAttempt(proto.Enum): RUN_ATTEMPT_UNSPECIFIED = 0 LATEST = 1 - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) states = proto.RepeatedField(proto.ENUM, number=2, enum=transfer.TransferState,) - - page_token = proto.Field(proto.STRING, number=3) - - page_size = proto.Field(proto.INT32, number=4) - + page_token = proto.Field(proto.STRING, number=3,) + page_size = proto.Field(proto.INT32, number=4,) run_attempt = proto.Field(proto.ENUM, number=5, enum=RunAttempt,) class ListTransferRunsResponse(proto.Message): r"""The returned list of pipelines in the project. - Attributes: transfer_runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): Output only. The stored pipeline transfer @@ -645,8 +586,7 @@ def raw_page(self): transfer_runs = proto.RepeatedField( proto.MESSAGE, number=1, message=transfer.TransferRun, ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListTransferLogsRequest(proto.Message): @@ -674,12 +614,9 @@ class ListTransferLogsRequest(proto.Message): INFO, WARNING and ERROR messages are returned. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=4) - - page_size = proto.Field(proto.INT32, number=5) - + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=4,) + page_size = proto.Field(proto.INT32, number=5,) message_types = proto.RepeatedField( proto.ENUM, number=6, enum=transfer.TransferMessage.MessageSeverity, ) @@ -687,7 +624,6 @@ class ListTransferLogsRequest(proto.Message): class ListTransferLogsResponse(proto.Message): r"""The returned list transfer run messages. - Attributes: transfer_messages (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage]): Output only. The stored pipeline transfer @@ -706,8 +642,7 @@ def raw_page(self): transfer_messages = proto.RepeatedField( proto.MESSAGE, number=1, message=transfer.TransferMessage, ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CheckValidCredsRequest(proto.Message): @@ -726,7 +661,7 @@ class CheckValidCredsRequest(proto.Message): ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CheckValidCredsResponse(proto.Message): @@ -738,12 +673,11 @@ class CheckValidCredsResponse(proto.Message): If set to ``true``, the credentials exist and are valid. """ - has_valid_creds = proto.Field(proto.BOOL, number=1) + has_valid_creds = proto.Field(proto.BOOL, number=1,) class ScheduleTransferRunsRequest(proto.Message): r"""A request to schedule transfer runs for a time range. - Attributes: parent (str): Required. Transfer configuration name in the form: @@ -757,16 +691,13 @@ class ScheduleTransferRunsRequest(proto.Message): example, ``"2017-05-30T00:00:00+00:00"``. """ - parent = proto.Field(proto.STRING, number=1) - - start_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + parent = proto.Field(proto.STRING, number=1,) + start_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) class ScheduleTransferRunsResponse(proto.Message): r"""A response to schedule transfer runs for a time range. - Attributes: runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): The transfer runs that were scheduled. @@ -777,7 +708,6 @@ class ScheduleTransferRunsResponse(proto.Message): class StartManualTransferRunsRequest(proto.Message): r"""A request to start manual transfer runs. - Attributes: parent (str): Transfer configuration name in the form: @@ -810,24 +740,24 @@ class TimeRange(proto.Message): range betwen start_time (inclusive) and end_time (exlusive). """ - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - - parent = proto.Field(proto.STRING, number=1) + start_time = proto.Field( + proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, + ) + parent = proto.Field(proto.STRING, number=1,) requested_time_range = proto.Field( proto.MESSAGE, number=3, oneof="time", message=TimeRange, ) - requested_run_time = proto.Field( - proto.MESSAGE, number=4, oneof="time", message=timestamp.Timestamp, + proto.MESSAGE, number=4, oneof="time", message=timestamp_pb2.Timestamp, ) class StartManualTransferRunsResponse(proto.Message): r"""A response to start manual transfer runs. - Attributes: runs (Sequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): The transfer runs that were created. diff --git a/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/google/cloud/bigquery_datatransfer_v1/types/transfer.py index 63274fab..5e04fc7d 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ b/google/cloud/bigquery_datatransfer_v1/types/transfer.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore __protobuf__ = proto.module( @@ -65,12 +62,11 @@ class EmailPreferences(proto.Message): transfer run failures. """ - enable_failure_email = proto.Field(proto.BOOL, number=1) + enable_failure_email = proto.Field(proto.BOOL, number=1,) class ScheduleOptions(proto.Message): r"""Options customizing the data transfer schedule. - Attributes: disable_auto_scheduling (bool): If true, automatic scheduling of data @@ -96,11 +92,9 @@ class ScheduleOptions(proto.Message): option. """ - disable_auto_scheduling = proto.Field(proto.BOOL, number=3) - - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + disable_auto_scheduling = proto.Field(proto.BOOL, number=3,) + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) class TransferConfig(proto.Message): @@ -181,36 +175,23 @@ class TransferConfig(proto.Message): user who owns this transfer config. """ - name = proto.Field(proto.STRING, number=1) - - destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination") - - display_name = proto.Field(proto.STRING, number=3) - - data_source_id = proto.Field(proto.STRING, number=5) - - params = proto.Field(proto.MESSAGE, number=9, message=struct.Struct,) - - schedule = proto.Field(proto.STRING, number=7) - + name = proto.Field(proto.STRING, number=1,) + destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination",) + display_name = proto.Field(proto.STRING, number=3,) + data_source_id = proto.Field(proto.STRING, number=5,) + params = proto.Field(proto.MESSAGE, number=9, message=struct_pb2.Struct,) + schedule = proto.Field(proto.STRING, number=7,) schedule_options = proto.Field(proto.MESSAGE, number=24, message="ScheduleOptions",) - - data_refresh_window_days = proto.Field(proto.INT32, number=12) - - disabled = proto.Field(proto.BOOL, number=13) - - update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - next_run_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) - + data_refresh_window_days = proto.Field(proto.INT32, number=12,) + disabled = proto.Field(proto.BOOL, number=13,) + update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + next_run_time = proto.Field( + proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp, + ) state = proto.Field(proto.ENUM, number=10, enum="TransferState",) - - user_id = proto.Field(proto.INT64, number=11) - - dataset_region = proto.Field(proto.STRING, number=14) - - notification_pubsub_topic = proto.Field(proto.STRING, number=15) - + user_id = proto.Field(proto.INT64, number=11,) + dataset_region = proto.Field(proto.STRING, number=14,) + notification_pubsub_topic = proto.Field(proto.STRING, number=15,) email_preferences = proto.Field( proto.MESSAGE, number=18, message="EmailPreferences", ) @@ -218,7 +199,6 @@ class TransferConfig(proto.Message): class TransferRun(proto.Message): r"""Represents a data transfer run. 
- Attributes: name (str): The resource name of the transfer run. Transfer run names @@ -274,34 +254,22 @@ class TransferRun(proto.Message): this run was derived from. """ - name = proto.Field(proto.STRING, number=1) - - schedule_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - run_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) - - error_status = proto.Field(proto.MESSAGE, number=21, message=status.Status,) - - start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) - - params = proto.Field(proto.MESSAGE, number=9, message=struct.Struct,) - - destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination") - - data_source_id = proto.Field(proto.STRING, number=7) - + name = proto.Field(proto.STRING, number=1,) + schedule_time = proto.Field( + proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, + ) + run_time = proto.Field(proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp,) + error_status = proto.Field(proto.MESSAGE, number=21, message=status_pb2.Status,) + start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) + params = proto.Field(proto.MESSAGE, number=9, message=struct_pb2.Struct,) + destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination",) + data_source_id = proto.Field(proto.STRING, number=7,) state = proto.Field(proto.ENUM, number=8, enum="TransferState",) - - user_id = proto.Field(proto.INT64, number=11) - - schedule = proto.Field(proto.STRING, number=12) - - notification_pubsub_topic = proto.Field(proto.STRING, number=23) - + user_id = proto.Field(proto.INT64, number=11,) + schedule = proto.Field(proto.STRING, number=12,) + notification_pubsub_topic = proto.Field(proto.STRING, number=23,) email_preferences = proto.Field( proto.MESSAGE, number=25, message="EmailPreferences", ) @@ -327,11 +295,11 @@ class MessageSeverity(proto.Enum): WARNING = 2 ERROR = 3 - message_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) - + message_time = proto.Field( + proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, + ) severity = proto.Field(proto.ENUM, number=2, enum=MessageSeverity,) - - message_text = proto.Field(proto.STRING, number=3) + message_text = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owlbot.py b/owlbot.py index 87e7c517..2281a7a8 100644 --- a/owlbot.py +++ b/owlbot.py @@ -26,17 +26,18 @@ # Generate bigquery_datatransfer GAPIC layer # ---------------------------------------------------------------------------- for library in s.get_staging_dirs("v1"): - # Fix missing async client in datatransfer_v1 - # https://github.com/googleapis/gapic-generator-python/issues/815 + + # Comment out broken assertion in unit test + # https://github.com/googleapis/gapic-generator-python/issues/897 s.replace( - library / "google/cloud/bigquery_datatransfer_v1/__init__.py", - r"from \.services\.data_transfer_service import DataTransferServiceClient", - "\\g<0>\nfrom .services.data_transfer_service import DataTransferServiceAsyncClient", + library / "tests/**/*.py", + "assert args\[0\]\.start_time == 
timestamp_pb2\.Timestamp\(seconds=751\)", + "# assert args[0].start_time == timestamp_pb2.Timestamp(seconds=751)" ) s.replace( - library / "google/cloud/bigquery_datatransfer_v1/__init__.py", - r"'DataTransferServiceClient',", - '\\g<0>\n "DataTransferServiceAsyncClient"', + library / "tests/**/*.py", + "assert args\[0\]\.end_time == timestamp_pb2\.Timestamp\(seconds=751\)", + "# assert args[0].end_time == timestamp_pb2.Timestamp(seconds=751)" ) s.move(library, excludes=["*.tar.gz", "docs/index.rst", "README.rst", "setup.py"]) diff --git a/scripts/fixup_bigquery_datatransfer_v1_keywords.py b/scripts/fixup_bigquery_datatransfer_v1_keywords.py index c32d8022..f7746ab5 100644 --- a/scripts/fixup_bigquery_datatransfer_v1_keywords.py +++ b/scripts/fixup_bigquery_datatransfer_v1_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import argparse import os import libcst as cst @@ -41,21 +39,20 @@ def partition( class bigquery_datatransferCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'check_valid_creds': ('name', ), - 'create_transfer_config': ('parent', 'transfer_config', 'authorization_code', 'version_info', 'service_account_name', ), - 'delete_transfer_config': ('name', ), - 'delete_transfer_run': ('name', ), - 'get_data_source': ('name', ), - 'get_transfer_config': ('name', ), - 'get_transfer_run': ('name', ), - 'list_data_sources': ('parent', 'page_token', 'page_size', ), - 'list_transfer_configs': ('parent', 'data_source_ids', 'page_token', 'page_size', ), - 'list_transfer_logs': ('parent', 'page_token', 'page_size', 'message_types', ), - 'list_transfer_runs': ('parent', 'states', 'page_token', 'page_size', 'run_attempt', ), - 'schedule_transfer_runs': ('parent', 'start_time', 'end_time', ), - 'start_manual_transfer_runs': ('parent', 'requested_time_range', 'requested_run_time', ), - 'update_transfer_config': ('transfer_config', 'update_mask', 'authorization_code', 'version_info', 'service_account_name', ), - + 'check_valid_creds': ('name', ), + 'create_transfer_config': ('parent', 'transfer_config', 'authorization_code', 'version_info', 'service_account_name', ), + 'delete_transfer_config': ('name', ), + 'delete_transfer_run': ('name', ), + 'get_data_source': ('name', ), + 'get_transfer_config': ('name', ), + 'get_transfer_run': ('name', ), + 'list_data_sources': ('parent', 'page_token', 'page_size', ), + 'list_transfer_configs': ('parent', 'data_source_ids', 'page_token', 'page_size', ), + 'list_transfer_logs': ('parent', 'page_token', 'page_size', 'message_types', ), + 'list_transfer_runs': ('parent', 'states', 'page_token', 'page_size', 'run_attempt', ), + 'schedule_transfer_runs': ('parent', 'start_time', 'end_time', ), + 'start_manual_transfer_runs': ('parent', 'requested_time_range', 'requested_run_time', ), + 'update_transfer_config': ('transfer_config', 'update_mask', 'authorization_code', 'version_info', 'service_account_name', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -86,7 +83,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the 
args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/setup.py b/setup.py index 63033cb8..facd57e4 100644 --- a/setup.py +++ b/setup.py @@ -31,6 +31,7 @@ dependencies = ( "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.15.0", + "packaging >= 14.3", ) extras = {"libcst": "libcst >= 0.2.5"} diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 28be0fe2..e6739c3e 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -8,3 +8,5 @@ google-api-core==1.22.2 proto-plus==1.15.0 libcst==0.2.5 +packaging==14.3 +google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is transitively required through google-api-core diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
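Context for the fixup-script hunk above: METHOD_TO_PARAMS is only whitespace-shuffled here, but it is the table the libcst transformer consults to turn pre-1.0 positional calls into the request-dict form, leaving retry/timeout/metadata as real keyword arguments. A runnable before/after illustration — the client is mocked, and the timestamp strings are placeholders (real calls would pass Timestamp messages):

from unittest import mock

client = mock.Mock()  # stands in for DataTransferServiceClient

# Before fixup (old surface, kept as a comment so this script stays runnable):
#   client.schedule_transfer_runs("parent_value", start_time, end_time)

# After fixup: one request dict, keyed by METHOD_TO_PARAMS['schedule_transfer_runs']
client.schedule_transfer_runs(
    request={
        "parent": "parent_value",
        "start_time": "2017-05-30T00:00:00+00:00",
        "end_time": "2017-06-30T00:00:00+00:00",
    }
)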
+# diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py b/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py index 42ffdf2b..4de65971 100644 --- a/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py +++ b/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index 345a6b56..cf70ccc9 100644 --- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import ( DataTransferServiceAsyncClient, @@ -42,14 +41,44 @@ from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import ( transports, ) +from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.bigquery_datatransfer_v1.types import datatransfer from google.cloud.bigquery_datatransfer_v1.types import transfer from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -101,7 +130,7 @@ def test__get_default_mtls_endpoint(): "client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient,] ) def test_data_transfer_service_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -118,7 +147,7 @@ def test_data_transfer_service_client_from_service_account_info(client_class): "client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient,] ) def test_data_transfer_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -175,7 +204,7 @@ def test_data_transfer_service_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(DataTransferServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -481,7 +510,7 @@ def test_get_data_source( transport: str = "grpc", request_type=datatransfer.GetDataSourceRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -509,55 +538,36 @@ def test_get_data_source( default_data_refresh_window_days=3379, manual_runs_disabled=True, ) - response = client.get_data_source(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetDataSourceRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, datatransfer.DataSource) - assert response.name == "name_value" - assert response.data_source_id == "data_source_id_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.client_id == "client_id_value" - assert response.scopes == ["scopes_value"] - assert response.transfer_type == transfer.TransferType.BATCH - assert response.supports_multiple_transfers is True - assert response.update_deadline_seconds == 2406 - assert response.default_schedule == "default_schedule_value" - assert response.supports_custom_schedule is True - assert response.help_url == "help_url_value" - assert ( response.authorization_type == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE ) - assert ( response.data_refresh_type == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW ) - assert response.default_data_refresh_window_days == 3379 - assert response.manual_runs_disabled is True @@ -569,7 +579,7 @@ def test_get_data_source_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -577,7 +587,6 @@ def test_get_data_source_empty_call(): client.get_data_source() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetDataSourceRequest() @@ -586,7 +595,7 @@ async def test_get_data_source_async( transport: str = "grpc_asyncio", request_type=datatransfer.GetDataSourceRequest ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -616,54 +625,36 @@ async def test_get_data_source_async( manual_runs_disabled=True, ) ) - response = await client.get_data_source(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetDataSourceRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datatransfer.DataSource) - assert response.name == "name_value" - assert response.data_source_id == "data_source_id_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.client_id == "client_id_value" - assert response.scopes == ["scopes_value"] - assert response.transfer_type == transfer.TransferType.BATCH - assert response.supports_multiple_transfers is True - assert response.update_deadline_seconds == 2406 - assert response.default_schedule == "default_schedule_value" - assert response.supports_custom_schedule is True - assert response.help_url == "help_url_value" - assert ( response.authorization_type == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE ) - assert ( response.data_refresh_type == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW ) - assert response.default_data_refresh_window_days == 3379 - assert response.manual_runs_disabled is True @@ -673,17 +664,19 @@ async def test_get_data_source_async_from_dict(): def test_get_data_source_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.GetDataSourceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: call.return_value = datatransfer.DataSource() - client.get_data_source(request) # Establish that the underlying gRPC stub method was called. @@ -699,12 +692,13 @@ def test_get_data_source_field_headers(): @pytest.mark.asyncio async def test_get_data_source_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.GetDataSourceRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -712,7 +706,6 @@ async def test_get_data_source_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datatransfer.DataSource() ) - await client.get_data_source(request) # Establish that the underlying gRPC stub method was called. @@ -726,13 +719,14 @@ async def test_get_data_source_field_headers_async(): def test_get_data_source_flattened(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datatransfer.DataSource() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_data_source(name="name_value",) @@ -741,12 +735,13 @@ def test_get_data_source_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_data_source_flattened_error(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -759,7 +754,7 @@ def test_get_data_source_flattened_error(): @pytest.mark.asyncio async def test_get_data_source_flattened_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -778,14 +773,13 @@ async def test_get_data_source_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_data_source_flattened_error_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -800,7 +794,7 @@ def test_list_data_sources( transport: str = "grpc", request_type=datatransfer.ListDataSourcesRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -815,19 +809,15 @@ def test_list_data_sources( call.return_value = datatransfer.ListDataSourcesResponse( next_page_token="next_page_token_value", ) - response = client.list_data_sources(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListDataSourcesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataSourcesPager) - assert response.next_page_token == "next_page_token_value" @@ -839,7 +829,7 @@ def test_list_data_sources_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -849,7 +839,6 @@ def test_list_data_sources_empty_call(): client.list_data_sources() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListDataSourcesRequest() @@ -858,7 +847,7 @@ async def test_list_data_sources_async( transport: str = "grpc_asyncio", request_type=datatransfer.ListDataSourcesRequest ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -875,18 +864,15 @@ async def test_list_data_sources_async( next_page_token="next_page_token_value", ) ) - response = await client.list_data_sources(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListDataSourcesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDataSourcesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -896,11 +882,14 @@ async def test_list_data_sources_async_from_dict(): def test_list_data_sources_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.ListDataSourcesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -908,7 +897,6 @@ def test_list_data_sources_field_headers(): type(client.transport.list_data_sources), "__call__" ) as call: call.return_value = datatransfer.ListDataSourcesResponse() - client.list_data_sources(request) # Establish that the underlying gRPC stub method was called. @@ -924,12 +912,13 @@ def test_list_data_sources_field_headers(): @pytest.mark.asyncio async def test_list_data_sources_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.ListDataSourcesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -939,7 +928,6 @@ async def test_list_data_sources_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datatransfer.ListDataSourcesResponse() ) - await client.list_data_sources(request) # Establish that the underlying gRPC stub method was called. @@ -953,7 +941,9 @@ async def test_list_data_sources_field_headers_async(): def test_list_data_sources_flattened(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -961,7 +951,6 @@ def test_list_data_sources_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datatransfer.ListDataSourcesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_data_sources(parent="parent_value",) @@ -970,12 +959,13 @@ def test_list_data_sources_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_data_sources_flattened_error(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
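The field-header tests now set `request.parent = "parent/value"` explicitly before invoking the mocked stub. The assertion each of these hunks trails off before (it sits in unchanged context) verifies the routing header; in generated GAPIC tests it typically has the shape sketched below, so treat the exact tuple as an assumption rather than a quotation of this file:

# Assumed shape of the routing-header check that follows these hunks:
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]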
@@ -988,7 +978,7 @@ def test_list_data_sources_flattened_error(): @pytest.mark.asyncio async def test_list_data_sources_flattened_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1009,14 +999,13 @@ async def test_list_data_sources_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_data_sources_flattened_error_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1028,7 +1017,7 @@ async def test_list_data_sources_flattened_error_async(): def test_list_data_sources_pager(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1070,7 +1059,7 @@ def test_list_data_sources_pager(): def test_list_data_sources_pages(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1105,7 +1094,7 @@ def test_list_data_sources_pages(): @pytest.mark.asyncio async def test_list_data_sources_async_pager(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1148,7 +1137,7 @@ async def test_list_data_sources_async_pager(): @pytest.mark.asyncio async def test_list_data_sources_async_pages(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1189,7 +1178,7 @@ def test_create_transfer_config( transport: str = "grpc", request_type=datatransfer.CreateTransferConfigRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1214,37 +1203,24 @@ def test_create_transfer_config( notification_pubsub_topic="notification_pubsub_topic_value", destination_dataset_id="destination_dataset_id_value", ) - response = client.create_transfer_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.CreateTransferConfigRequest() # Establish that the response is the type that we expect. 
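Note that the pager tests above pass `ga_credentials.AnonymousCredentials` itself, the class rather than an instance; the generator has historically emitted these particular tests that way, and this patch preserves it. Consuming the pager is then plain iteration. A rough sketch, assuming the transport is mocked to return several `ListDataSourcesResponse` pages as in the hunks above:

# With list_data_sources mocked to return multiple pages, the pager is
# consumed by ordinary iteration and transparently fetches each page.
pager = client.list_data_sources(request={})
results = [data_source for data_source in pager]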
- assert isinstance(response, transfer.TransferConfig) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.data_source_id == "data_source_id_value" - assert response.schedule == "schedule_value" - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == "dataset_region_value" - assert response.notification_pubsub_topic == "notification_pubsub_topic_value" @@ -1256,7 +1232,7 @@ def test_create_transfer_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1266,7 +1242,6 @@ def test_create_transfer_config_empty_call(): client.create_transfer_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.CreateTransferConfigRequest() @@ -1276,7 +1251,7 @@ async def test_create_transfer_config_async( request_type=datatransfer.CreateTransferConfigRequest, ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1302,36 +1277,24 @@ async def test_create_transfer_config_async( notification_pubsub_topic="notification_pubsub_topic_value", ) ) - response = await client.create_transfer_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.CreateTransferConfigRequest() # Establish that the response is the type that we expect. assert isinstance(response, transfer.TransferConfig) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.data_source_id == "data_source_id_value" - assert response.schedule == "schedule_value" - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == "dataset_region_value" - assert response.notification_pubsub_topic == "notification_pubsub_topic_value" @@ -1341,11 +1304,14 @@ async def test_create_transfer_config_async_from_dict(): def test_create_transfer_config_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.CreateTransferConfigRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1353,7 +1319,6 @@ def test_create_transfer_config_field_headers(): type(client.transport.create_transfer_config), "__call__" ) as call: call.return_value = transfer.TransferConfig() - client.create_transfer_config(request) # Establish that the underlying gRPC stub method was called. 
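The mocked return values above construct `transfer.TransferConfig` with plain keyword arguments; proto-plus messages accept their fields this way and expose them back as typed attributes, which is what the surviving assertions rely on. A self-contained sketch:

from google.cloud.bigquery_datatransfer_v1.types import transfer

# proto-plus messages take field values as keyword arguments...
cfg = transfer.TransferConfig(
    name="name_value",
    display_name="display_name_value",
    data_refresh_window_days=2543,
)
# ...and expose them as attributes with the declared types.
assert cfg.name == "name_value"
assert cfg.data_refresh_window_days == 2543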
@@ -1369,12 +1334,13 @@ def test_create_transfer_config_field_headers(): @pytest.mark.asyncio async def test_create_transfer_config_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.CreateTransferConfigRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1384,7 +1350,6 @@ async def test_create_transfer_config_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( transfer.TransferConfig() ) - await client.create_transfer_config(request) # Establish that the underlying gRPC stub method was called. @@ -1398,7 +1363,9 @@ async def test_create_transfer_config_field_headers_async(): def test_create_transfer_config_flattened(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1406,7 +1373,6 @@ def test_create_transfer_config_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = transfer.TransferConfig() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_transfer_config( @@ -1418,14 +1384,14 @@ def test_create_transfer_config_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].transfer_config == transfer.TransferConfig(name="name_value") def test_create_transfer_config_flattened_error(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1440,7 +1406,7 @@ def test_create_transfer_config_flattened_error(): @pytest.mark.asyncio async def test_create_transfer_config_flattened_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1464,16 +1430,14 @@ async def test_create_transfer_config_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].transfer_config == transfer.TransferConfig(name="name_value") @pytest.mark.asyncio async def test_create_transfer_config_flattened_error_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1490,7 +1454,7 @@ def test_update_transfer_config( transport: str = "grpc", request_type=datatransfer.UpdateTransferConfigRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1515,37 +1479,24 @@ def test_update_transfer_config( notification_pubsub_topic="notification_pubsub_topic_value", destination_dataset_id="destination_dataset_id_value", ) - response = client.update_transfer_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.UpdateTransferConfigRequest() # Establish that the response is the type that we expect. - assert isinstance(response, transfer.TransferConfig) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.data_source_id == "data_source_id_value" - assert response.schedule == "schedule_value" - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == "dataset_region_value" - assert response.notification_pubsub_topic == "notification_pubsub_topic_value" @@ -1557,7 +1508,7 @@ def test_update_transfer_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1567,7 +1518,6 @@ def test_update_transfer_config_empty_call(): client.update_transfer_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.UpdateTransferConfigRequest() @@ -1577,7 +1527,7 @@ async def test_update_transfer_config_async( request_type=datatransfer.UpdateTransferConfigRequest, ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1603,36 +1553,24 @@ async def test_update_transfer_config_async( notification_pubsub_topic="notification_pubsub_topic_value", ) ) - response = await client.update_transfer_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.UpdateTransferConfigRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, transfer.TransferConfig) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.data_source_id == "data_source_id_value" - assert response.schedule == "schedule_value" - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == "dataset_region_value" - assert response.notification_pubsub_topic == "notification_pubsub_topic_value" @@ -1642,11 +1580,14 @@ async def test_update_transfer_config_async_from_dict(): def test_update_transfer_config_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.UpdateTransferConfigRequest() + request.transfer_config.name = "transfer_config.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1654,7 +1595,6 @@ def test_update_transfer_config_field_headers(): type(client.transport.update_transfer_config), "__call__" ) as call: call.return_value = transfer.TransferConfig() - client.update_transfer_config(request) # Establish that the underlying gRPC stub method was called. @@ -1673,12 +1613,13 @@ def test_update_transfer_config_field_headers(): @pytest.mark.asyncio async def test_update_transfer_config_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.UpdateTransferConfigRequest() + request.transfer_config.name = "transfer_config.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1688,7 +1629,6 @@ async def test_update_transfer_config_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( transfer.TransferConfig() ) - await client.update_transfer_config(request) # Establish that the underlying gRPC stub method was called. @@ -1705,7 +1645,9 @@ async def test_update_transfer_config_field_headers_async(): def test_update_transfer_config_flattened(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1713,26 +1655,25 @@ def test_update_transfer_config_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = transfer.TransferConfig() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_transfer_config( transfer_config=transfer.TransferConfig(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].transfer_config == transfer.TransferConfig(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_transfer_config_flattened_error(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1740,14 +1681,14 @@ def test_update_transfer_config_flattened_error(): client.update_transfer_config( datatransfer.UpdateTransferConfigRequest(), transfer_config=transfer.TransferConfig(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_transfer_config_flattened_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1764,23 +1705,21 @@ async def test_update_transfer_config_flattened_async(): # using the keyword arguments to the method. response = await client.update_transfer_config( transfer_config=transfer.TransferConfig(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].transfer_config == transfer.TransferConfig(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_transfer_config_flattened_error_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1789,7 +1728,7 @@ async def test_update_transfer_config_flattened_error_async(): await client.update_transfer_config( datatransfer.UpdateTransferConfigRequest(), transfer_config=transfer.TransferConfig(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1797,7 +1736,7 @@ def test_delete_transfer_config( transport: str = "grpc", request_type=datatransfer.DeleteTransferConfigRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1810,13 +1749,11 @@ def test_delete_transfer_config( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_transfer_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.DeleteTransferConfigRequest() # Establish that the response is the type that we expect. 
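The `update_transfer_config` hunks swap `field_mask.FieldMask` for `field_mask_pb2.FieldMask`: the newer generated code refers to protobuf well-known types by their `_pb2` module names instead of an alias. The import hunk is outside this excerpt, so this is a sketch under that assumption:

# Before (assumed): from google.protobuf import field_mask_pb2 as field_mask
from google.protobuf import field_mask_pb2

# Same well-known type, now spelled with its _pb2 module name.
mask = field_mask_pb2.FieldMask(paths=["display_name"])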
@@ -1831,7 +1768,7 @@ def test_delete_transfer_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1841,7 +1778,6 @@ def test_delete_transfer_config_empty_call(): client.delete_transfer_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.DeleteTransferConfigRequest() @@ -1851,7 +1787,7 @@ async def test_delete_transfer_config_async( request_type=datatransfer.DeleteTransferConfigRequest, ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1864,13 +1800,11 @@ async def test_delete_transfer_config_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_transfer_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.DeleteTransferConfigRequest() # Establish that the response is the type that we expect. @@ -1883,11 +1817,14 @@ async def test_delete_transfer_config_async_from_dict(): def test_delete_transfer_config_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.DeleteTransferConfigRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1895,7 +1832,6 @@ def test_delete_transfer_config_field_headers(): type(client.transport.delete_transfer_config), "__call__" ) as call: call.return_value = None - client.delete_transfer_config(request) # Establish that the underlying gRPC stub method was called. @@ -1911,12 +1847,13 @@ def test_delete_transfer_config_field_headers(): @pytest.mark.asyncio async def test_delete_transfer_config_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.DeleteTransferConfigRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1924,7 +1861,6 @@ async def test_delete_transfer_config_field_headers_async(): type(client.transport.delete_transfer_config), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_transfer_config(request) # Establish that the underlying gRPC stub method was called. 
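For `delete_transfer_config` the stub's return value is mocked as `None`, mirroring an RPC that returns `google.protobuf.Empty`. The type check these hunks cut off just before is, in the generated tests, presumably nothing more than the sketch below:

# Assumed tail of the test (elided by the hunk context above): a
# deletion RPC surfaces as a plain None return.
response = client.delete_transfer_config(name="name_value")
assert response is None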
@@ -1938,7 +1874,9 @@ async def test_delete_transfer_config_field_headers_async(): def test_delete_transfer_config_flattened(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1946,7 +1884,6 @@ def test_delete_transfer_config_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_transfer_config(name="name_value",) @@ -1955,12 +1892,13 @@ def test_delete_transfer_config_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_transfer_config_flattened_error(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1973,7 +1911,7 @@ def test_delete_transfer_config_flattened_error(): @pytest.mark.asyncio async def test_delete_transfer_config_flattened_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1992,14 +1930,13 @@ async def test_delete_transfer_config_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_transfer_config_flattened_error_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2014,7 +1951,7 @@ def test_get_transfer_config( transport: str = "grpc", request_type=datatransfer.GetTransferConfigRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2039,37 +1976,24 @@ def test_get_transfer_config( notification_pubsub_topic="notification_pubsub_topic_value", destination_dataset_id="destination_dataset_id_value", ) - response = client.get_transfer_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetTransferConfigRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, transfer.TransferConfig) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.data_source_id == "data_source_id_value" - assert response.schedule == "schedule_value" - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == "dataset_region_value" - assert response.notification_pubsub_topic == "notification_pubsub_topic_value" @@ -2081,7 +2005,7 @@ def test_get_transfer_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2091,7 +2015,6 @@ def test_get_transfer_config_empty_call(): client.get_transfer_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetTransferConfigRequest() @@ -2100,7 +2023,7 @@ async def test_get_transfer_config_async( transport: str = "grpc_asyncio", request_type=datatransfer.GetTransferConfigRequest ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2126,36 +2049,24 @@ async def test_get_transfer_config_async( notification_pubsub_topic="notification_pubsub_topic_value", ) ) - response = await client.get_transfer_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetTransferConfigRequest() # Establish that the response is the type that we expect. assert isinstance(response, transfer.TransferConfig) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.data_source_id == "data_source_id_value" - assert response.schedule == "schedule_value" - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == "dataset_region_value" - assert response.notification_pubsub_topic == "notification_pubsub_topic_value" @@ -2165,11 +2076,14 @@ async def test_get_transfer_config_async_from_dict(): def test_get_transfer_config_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.GetTransferConfigRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2177,7 +2091,6 @@ def test_get_transfer_config_field_headers(): type(client.transport.get_transfer_config), "__call__" ) as call: call.return_value = transfer.TransferConfig() - client.get_transfer_config(request) # Establish that the underlying gRPC stub method was called. 
@@ -2193,12 +2106,13 @@ def test_get_transfer_config_field_headers(): @pytest.mark.asyncio async def test_get_transfer_config_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.GetTransferConfigRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2208,7 +2122,6 @@ async def test_get_transfer_config_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( transfer.TransferConfig() ) - await client.get_transfer_config(request) # Establish that the underlying gRPC stub method was called. @@ -2222,7 +2135,9 @@ async def test_get_transfer_config_field_headers_async(): def test_get_transfer_config_flattened(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2230,7 +2145,6 @@ def test_get_transfer_config_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = transfer.TransferConfig() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_transfer_config(name="name_value",) @@ -2239,12 +2153,13 @@ def test_get_transfer_config_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_transfer_config_flattened_error(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2257,7 +2172,7 @@ def test_get_transfer_config_flattened_error(): @pytest.mark.asyncio async def test_get_transfer_config_flattened_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2278,14 +2193,13 @@ async def test_get_transfer_config_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_transfer_config_flattened_error_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2300,7 +2214,7 @@ def test_list_transfer_configs( transport: str = "grpc", request_type=datatransfer.ListTransferConfigsRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2315,19 +2229,15 @@ def test_list_transfer_configs( call.return_value = datatransfer.ListTransferConfigsResponse( next_page_token="next_page_token_value", ) - response = client.list_transfer_configs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListTransferConfigsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTransferConfigsPager) - assert response.next_page_token == "next_page_token_value" @@ -2339,7 +2249,7 @@ def test_list_transfer_configs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2349,7 +2259,6 @@ def test_list_transfer_configs_empty_call(): client.list_transfer_configs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListTransferConfigsRequest() @@ -2359,7 +2268,7 @@ async def test_list_transfer_configs_async( request_type=datatransfer.ListTransferConfigsRequest, ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2376,18 +2285,15 @@ async def test_list_transfer_configs_async( next_page_token="next_page_token_value", ) ) - response = await client.list_transfer_configs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListTransferConfigsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTransferConfigsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -2397,11 +2303,14 @@ async def test_list_transfer_configs_async_from_dict(): def test_list_transfer_configs_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.ListTransferConfigsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2409,7 +2318,6 @@ def test_list_transfer_configs_field_headers(): type(client.transport.list_transfer_configs), "__call__" ) as call: call.return_value = datatransfer.ListTransferConfigsResponse() - client.list_transfer_configs(request) # Establish that the underlying gRPC stub method was called. @@ -2425,12 +2333,13 @@ def test_list_transfer_configs_field_headers(): @pytest.mark.asyncio async def test_list_transfer_configs_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.ListTransferConfigsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2440,7 +2349,6 @@ async def test_list_transfer_configs_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datatransfer.ListTransferConfigsResponse() ) - await client.list_transfer_configs(request) # Establish that the underlying gRPC stub method was called. @@ -2454,7 +2362,9 @@ async def test_list_transfer_configs_field_headers_async(): def test_list_transfer_configs_flattened(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2462,7 +2372,6 @@ def test_list_transfer_configs_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datatransfer.ListTransferConfigsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_transfer_configs(parent="parent_value",) @@ -2471,12 +2380,13 @@ def test_list_transfer_configs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_transfer_configs_flattened_error(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2489,7 +2399,7 @@ def test_list_transfer_configs_flattened_error(): @pytest.mark.asyncio async def test_list_transfer_configs_flattened_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2510,14 +2420,13 @@ async def test_list_transfer_configs_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_transfer_configs_flattened_error_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2529,7 +2438,7 @@ async def test_list_transfer_configs_flattened_error_async(): def test_list_transfer_configs_pager(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2574,7 +2483,7 @@ def test_list_transfer_configs_pager(): def test_list_transfer_configs_pages(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2612,7 +2521,7 @@ def test_list_transfer_configs_pages(): @pytest.mark.asyncio async def test_list_transfer_configs_async_pager(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2658,7 +2567,7 @@ async def test_list_transfer_configs_async_pager(): @pytest.mark.asyncio async def test_list_transfer_configs_async_pages(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2702,7 +2611,7 @@ def test_schedule_transfer_runs( transport: str = "grpc", request_type=datatransfer.ScheduleTransferRunsRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2715,17 +2624,14 @@ def test_schedule_transfer_runs( ) as call: # Designate an appropriate return value for the call. call.return_value = datatransfer.ScheduleTransferRunsResponse() - response = client.schedule_transfer_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ScheduleTransferRunsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datatransfer.ScheduleTransferRunsResponse) @@ -2737,7 +2643,7 @@ def test_schedule_transfer_runs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
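The async pager tests mirror the sync ones but consume pages with async iteration. Roughly, under the same mocked transport, the body these hunks elide looks like:

# The async client returns an awaitable pager; pages are then drained
# with `async for` (this runs inside an async test function).
async_pager = await client.list_transfer_configs(request={})
responses = [response async for response in async_pager]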
@@ -2747,7 +2653,6 @@ def test_schedule_transfer_runs_empty_call(): client.schedule_transfer_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ScheduleTransferRunsRequest() @@ -2757,7 +2662,7 @@ async def test_schedule_transfer_runs_async( request_type=datatransfer.ScheduleTransferRunsRequest, ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2772,13 +2677,11 @@ async def test_schedule_transfer_runs_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datatransfer.ScheduleTransferRunsResponse() ) - response = await client.schedule_transfer_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ScheduleTransferRunsRequest() # Establish that the response is the type that we expect. @@ -2791,11 +2694,14 @@ async def test_schedule_transfer_runs_async_from_dict(): def test_schedule_transfer_runs_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.ScheduleTransferRunsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2803,7 +2709,6 @@ def test_schedule_transfer_runs_field_headers(): type(client.transport.schedule_transfer_runs), "__call__" ) as call: call.return_value = datatransfer.ScheduleTransferRunsResponse() - client.schedule_transfer_runs(request) # Establish that the underlying gRPC stub method was called. @@ -2819,12 +2724,13 @@ def test_schedule_transfer_runs_field_headers(): @pytest.mark.asyncio async def test_schedule_transfer_runs_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.ScheduleTransferRunsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2834,7 +2740,6 @@ async def test_schedule_transfer_runs_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datatransfer.ScheduleTransferRunsResponse() ) - await client.schedule_transfer_runs(request) # Establish that the underlying gRPC stub method was called. @@ -2848,7 +2753,9 @@ async def test_schedule_transfer_runs_field_headers_async(): def test_schedule_transfer_runs_flattened(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2856,33 +2763,27 @@ def test_schedule_transfer_runs_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = datatransfer.ScheduleTransferRunsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.schedule_transfer_runs( parent="parent_value", - start_time=timestamp.Timestamp(seconds=751), - end_time=timestamp.Timestamp(seconds=751), + start_time=timestamp_pb2.Timestamp(seconds=751), + end_time=timestamp_pb2.Timestamp(seconds=751), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - - assert TimestampRule().to_proto(args[0].start_time) == timestamp.Timestamp( - seconds=751 - ) - - assert TimestampRule().to_proto(args[0].end_time) == timestamp.Timestamp( - seconds=751 - ) + # assert args[0].start_time == timestamp_pb2.Timestamp(seconds=751) + # assert args[0].end_time == timestamp_pb2.Timestamp(seconds=751) def test_schedule_transfer_runs_flattened_error(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2890,15 +2791,15 @@ def test_schedule_transfer_runs_flattened_error(): client.schedule_transfer_runs( datatransfer.ScheduleTransferRunsRequest(), parent="parent_value", - start_time=timestamp.Timestamp(seconds=751), - end_time=timestamp.Timestamp(seconds=751), + start_time=timestamp_pb2.Timestamp(seconds=751), + end_time=timestamp_pb2.Timestamp(seconds=751), ) @pytest.mark.asyncio async def test_schedule_transfer_runs_flattened_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2915,30 +2816,23 @@ async def test_schedule_transfer_runs_flattened_async(): # using the keyword arguments to the method. response = await client.schedule_transfer_runs( parent="parent_value", - start_time=timestamp.Timestamp(seconds=751), - end_time=timestamp.Timestamp(seconds=751), + start_time=timestamp_pb2.Timestamp(seconds=751), + end_time=timestamp_pb2.Timestamp(seconds=751), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - - assert TimestampRule().to_proto(args[0].start_time) == timestamp.Timestamp( - seconds=751 - ) - - assert TimestampRule().to_proto(args[0].end_time) == timestamp.Timestamp( - seconds=751 - ) + # assert args[0].start_time == timestamp_pb2.Timestamp(seconds=751) + # assert args[0].end_time == timestamp_pb2.Timestamp(seconds=751) @pytest.mark.asyncio async def test_schedule_transfer_runs_flattened_error_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2947,8 +2841,8 @@ async def test_schedule_transfer_runs_flattened_error_async(): await client.schedule_transfer_runs( datatransfer.ScheduleTransferRunsRequest(), parent="parent_value", - start_time=timestamp.Timestamp(seconds=751), - end_time=timestamp.Timestamp(seconds=751), + start_time=timestamp_pb2.Timestamp(seconds=751), + end_time=timestamp_pb2.Timestamp(seconds=751), ) @@ -2956,7 +2850,7 @@ def test_start_manual_transfer_runs( transport: str = "grpc", request_type=datatransfer.StartManualTransferRunsRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2969,17 +2863,14 @@ def test_start_manual_transfer_runs( ) as call: # Designate an appropriate return value for the call. call.return_value = datatransfer.StartManualTransferRunsResponse() - response = client.start_manual_transfer_runs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.StartManualTransferRunsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, datatransfer.StartManualTransferRunsResponse) @@ -2991,7 +2882,7 @@ def test_start_manual_transfer_runs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3001,7 +2892,6 @@ def test_start_manual_transfer_runs_empty_call(): client.start_manual_transfer_runs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.StartManualTransferRunsRequest() @@ -3011,7 +2901,7 @@ async def test_start_manual_transfer_runs_async( request_type=datatransfer.StartManualTransferRunsRequest, ): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3026,13 +2916,11 @@ async def test_start_manual_transfer_runs_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datatransfer.StartManualTransferRunsResponse() ) - response = await client.start_manual_transfer_runs(request) # Establish that the underlying gRPC stub method was called. 
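Alongside the `timestamp` to `timestamp_pb2` rename, the old `TimestampRule().to_proto(...)` comparisons are replaced with commented-out direct asserts: proto-plus surfaces these fields as wrapped datetime values, so comparing them against a raw protobuf `Timestamp` needs a conversion step, which is presumably why the direct form is left disabled here. The flattened call itself keeps the same shape:

from google.protobuf import timestamp_pb2

# Well-known protobuf types keep their *_pb2 module names under the new
# convention; the call signature is otherwise unchanged.
client.schedule_transfer_runs(
    parent="parent_value",
    start_time=timestamp_pb2.Timestamp(seconds=751),
    end_time=timestamp_pb2.Timestamp(seconds=751),
)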
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.StartManualTransferRunsRequest() # Establish that the response is the type that we expect. @@ -3045,11 +2933,14 @@ async def test_start_manual_transfer_runs_async_from_dict(): def test_start_manual_transfer_runs_field_headers(): - client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.StartManualTransferRunsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3057,7 +2948,6 @@ def test_start_manual_transfer_runs_field_headers(): type(client.transport.start_manual_transfer_runs), "__call__" ) as call: call.return_value = datatransfer.StartManualTransferRunsResponse() - client.start_manual_transfer_runs(request) # Establish that the underlying gRPC stub method was called. @@ -3073,12 +2963,13 @@ def test_start_manual_transfer_runs_field_headers(): @pytest.mark.asyncio async def test_start_manual_transfer_runs_field_headers_async(): client = DataTransferServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datatransfer.StartManualTransferRunsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3088,7 +2979,6 @@ async def test_start_manual_transfer_runs_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datatransfer.StartManualTransferRunsResponse() ) - await client.start_manual_transfer_runs(request) # Establish that the underlying gRPC stub method was called. @@ -3105,7 +2995,7 @@ def test_get_transfer_run( transport: str = "grpc", request_type=datatransfer.GetTransferRunRequest ): client = DataTransferServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3124,29 +3014,20 @@ def test_get_transfer_run( notification_pubsub_topic="notification_pubsub_topic_value", destination_dataset_id="destination_dataset_id_value", ) - response = client.get_transfer_run(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetTransferRunRequest() # Establish that the response is the type that we expect. - assert isinstance(response, transfer.TransferRun) - assert response.name == "name_value" - assert response.data_source_id == "data_source_id_value" - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.schedule == "schedule_value" - assert response.notification_pubsub_topic == "notification_pubsub_topic_value" @@ -3158,7 +3039,7 @@ def test_get_transfer_run_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3166,7 +3047,6 @@ def test_get_transfer_run_empty_call():
         client.get_transfer_run()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == datatransfer.GetTransferRunRequest()


@@ -3175,7 +3055,7 @@ async def test_get_transfer_run_async(
     transport: str = "grpc_asyncio", request_type=datatransfer.GetTransferRunRequest
 ):
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -3195,28 +3075,20 @@ async def test_get_transfer_run_async(
                 notification_pubsub_topic="notification_pubsub_topic_value",
             )
         )
-
         response = await client.get_transfer_run(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.GetTransferRunRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, transfer.TransferRun)
-
     assert response.name == "name_value"
-
     assert response.data_source_id == "data_source_id_value"
-
     assert response.state == transfer.TransferState.PENDING
-
     assert response.user_id == 747
-
     assert response.schedule == "schedule_value"
-
     assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
@@ -3226,17 +3098,19 @@ async def test_get_transfer_run_async_from_dict():


 def test_get_transfer_run_field_headers():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.GetTransferRunRequest()
+
     request.name = "name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
         call.return_value = transfer.TransferRun()
-
         client.get_transfer_run(request)

     # Establish that the underlying gRPC stub method was called.
@@ -3252,12 +3126,13 @@ def test_get_transfer_run_field_headers():
 @pytest.mark.asyncio
 async def test_get_transfer_run_field_headers_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.GetTransferRunRequest()
+
     request.name = "name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3265,7 +3140,6 @@ async def test_get_transfer_run_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             transfer.TransferRun()
         )
-
         await client.get_transfer_run(request)

     # Establish that the underlying gRPC stub method was called.
@@ -3279,13 +3153,14 @@ async def test_get_transfer_run_field_headers_async():


 def test_get_transfer_run_flattened():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = transfer.TransferRun()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_transfer_run(name="name_value",)
@@ -3294,12 +3169,13 @@ def test_get_transfer_run_flattened():
     # request object values.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0].name == "name_value"


 def test_get_transfer_run_flattened_error():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -3312,7 +3188,7 @@ def test_get_transfer_run_flattened_error():
 @pytest.mark.asyncio
 async def test_get_transfer_run_flattened_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3331,14 +3207,13 @@ async def test_get_transfer_run_flattened_async():
     # request object values.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0].name == "name_value"


 @pytest.mark.asyncio
 async def test_get_transfer_run_flattened_error_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -3353,7 +3228,7 @@ def test_delete_transfer_run(
     transport: str = "grpc", request_type=datatransfer.DeleteTransferRunRequest
 ):
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -3366,13 +3241,11 @@ def test_delete_transfer_run(
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
-
         response = client.delete_transfer_run(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.DeleteTransferRunRequest()

     # Establish that the response is the type that we expect.
@@ -3387,7 +3260,7 @@ def test_delete_transfer_run_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3397,7 +3270,6 @@ def test_delete_transfer_run_empty_call():
         client.delete_transfer_run()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == datatransfer.DeleteTransferRunRequest()


@@ -3406,7 +3278,7 @@ async def test_delete_transfer_run_async(
     transport: str = "grpc_asyncio", request_type=datatransfer.DeleteTransferRunRequest
 ):
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -3419,13 +3291,11 @@ async def test_delete_transfer_run_async(
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
         response = await client.delete_transfer_run(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.DeleteTransferRunRequest()

     # Establish that the response is the type that we expect.
@@ -3438,11 +3308,14 @@ async def test_delete_transfer_run_async_from_dict():


 def test_delete_transfer_run_field_headers():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.DeleteTransferRunRequest()
+
     request.name = "name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3450,7 +3323,6 @@ def test_delete_transfer_run_field_headers():
         type(client.transport.delete_transfer_run), "__call__"
     ) as call:
         call.return_value = None
-
         client.delete_transfer_run(request)

     # Establish that the underlying gRPC stub method was called.
@@ -3466,12 +3338,13 @@ def test_delete_transfer_run_field_headers():
 @pytest.mark.asyncio
 async def test_delete_transfer_run_field_headers_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.DeleteTransferRunRequest()
+
     request.name = "name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3479,7 +3352,6 @@ async def test_delete_transfer_run_field_headers_async():
         type(client.transport.delete_transfer_run), "__call__"
     ) as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-
         await client.delete_transfer_run(request)

     # Establish that the underlying gRPC stub method was called.
@@ -3493,7 +3365,9 @@ async def test_delete_transfer_run_field_headers_async():


 def test_delete_transfer_run_flattened():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3501,7 +3375,6 @@ def test_delete_transfer_run_flattened():
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.delete_transfer_run(name="name_value",)
@@ -3510,12 +3383,13 @@ def test_delete_transfer_run_flattened():
     # request object values.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0].name == "name_value"


 def test_delete_transfer_run_flattened_error():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -3528,7 +3402,7 @@ def test_delete_transfer_run_flattened_error():
 @pytest.mark.asyncio
 async def test_delete_transfer_run_flattened_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3547,14 +3421,13 @@ async def test_delete_transfer_run_flattened_async():
     # request object values.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0].name == "name_value"


 @pytest.mark.asyncio
 async def test_delete_transfer_run_flattened_error_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -3569,7 +3442,7 @@ def test_list_transfer_runs(
     transport: str = "grpc", request_type=datatransfer.ListTransferRunsRequest
 ):
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -3584,19 +3457,15 @@ def test_list_transfer_runs(
         call.return_value = datatransfer.ListTransferRunsResponse(
             next_page_token="next_page_token_value",
         )
-
         response = client.list_transfer_runs(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.ListTransferRunsRequest()

     # Establish that the response is the type that we expect.
-
     assert isinstance(response, pagers.ListTransferRunsPager)
-
     assert response.next_page_token == "next_page_token_value"
@@ -3608,7 +3477,7 @@ def test_list_transfer_runs_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3618,7 +3487,6 @@ def test_list_transfer_runs_empty_call():
         client.list_transfer_runs()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == datatransfer.ListTransferRunsRequest()


@@ -3627,7 +3495,7 @@ async def test_list_transfer_runs_async(
     transport: str = "grpc_asyncio", request_type=datatransfer.ListTransferRunsRequest
 ):
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -3644,18 +3512,15 @@ async def test_list_transfer_runs_async(
                 next_page_token="next_page_token_value",
             )
         )
-
         response = await client.list_transfer_runs(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.ListTransferRunsRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListTransferRunsAsyncPager)
-
     assert response.next_page_token == "next_page_token_value"
@@ -3665,11 +3530,14 @@ async def test_list_transfer_runs_async_from_dict():


 def test_list_transfer_runs_field_headers():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.ListTransferRunsRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3677,7 +3545,6 @@ def test_list_transfer_runs_field_headers():
         type(client.transport.list_transfer_runs), "__call__"
     ) as call:
         call.return_value = datatransfer.ListTransferRunsResponse()
-
         client.list_transfer_runs(request)

     # Establish that the underlying gRPC stub method was called.
@@ -3693,12 +3560,13 @@ def test_list_transfer_runs_field_headers():
 @pytest.mark.asyncio
 async def test_list_transfer_runs_field_headers_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.ListTransferRunsRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3708,7 +3576,6 @@ async def test_list_transfer_runs_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             datatransfer.ListTransferRunsResponse()
         )
-
         await client.list_transfer_runs(request)

     # Establish that the underlying gRPC stub method was called.
@@ -3722,7 +3589,9 @@ async def test_list_transfer_runs_field_headers_async():


 def test_list_transfer_runs_flattened():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3730,7 +3599,6 @@ def test_list_transfer_runs_flattened():
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = datatransfer.ListTransferRunsResponse()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_transfer_runs(parent="parent_value",)
@@ -3739,12 +3607,13 @@ def test_list_transfer_runs_flattened():
     # request object values.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0].parent == "parent_value"


 def test_list_transfer_runs_flattened_error():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -3757,7 +3626,7 @@ def test_list_transfer_runs_flattened_error():
 @pytest.mark.asyncio
 async def test_list_transfer_runs_flattened_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3778,14 +3647,13 @@ async def test_list_transfer_runs_flattened_async():
     # request object values.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0].parent == "parent_value"


 @pytest.mark.asyncio
 async def test_list_transfer_runs_flattened_error_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -3797,7 +3665,7 @@ async def test_list_transfer_runs_flattened_error_async():


 def test_list_transfer_runs_pager():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+    client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3839,7 +3707,7 @@ def test_list_transfer_runs_pager():


 def test_list_transfer_runs_pages():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+    client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3874,7 +3742,7 @@ def test_list_transfer_runs_pages():
 @pytest.mark.asyncio
 async def test_list_transfer_runs_async_pager():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials,
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3917,7 +3785,7 @@ async def test_list_transfer_runs_async_pager():
 @pytest.mark.asyncio
 async def test_list_transfer_runs_async_pages():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials,
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3958,7 +3826,7 @@ def test_list_transfer_logs(
     transport: str = "grpc", request_type=datatransfer.ListTransferLogsRequest
 ):
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -3973,19 +3841,15 @@ def test_list_transfer_logs(
         call.return_value = datatransfer.ListTransferLogsResponse(
             next_page_token="next_page_token_value",
         )
-
         response = client.list_transfer_logs(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.ListTransferLogsRequest()

     # Establish that the response is the type that we expect.
-
     assert isinstance(response, pagers.ListTransferLogsPager)
-
     assert response.next_page_token == "next_page_token_value"
@@ -3997,7 +3861,7 @@ def test_list_transfer_logs_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4007,7 +3871,6 @@ def test_list_transfer_logs_empty_call():
         client.list_transfer_logs()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == datatransfer.ListTransferLogsRequest()


@@ -4016,7 +3879,7 @@ async def test_list_transfer_logs_async(
     transport: str = "grpc_asyncio", request_type=datatransfer.ListTransferLogsRequest
 ):
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -4033,18 +3896,15 @@ async def test_list_transfer_logs_async(
                 next_page_token="next_page_token_value",
             )
         )
-
         response = await client.list_transfer_logs(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.ListTransferLogsRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListTransferLogsAsyncPager)
-
     assert response.next_page_token == "next_page_token_value"
@@ -4054,11 +3914,14 @@ async def test_list_transfer_logs_async_from_dict():


 def test_list_transfer_logs_field_headers():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.ListTransferLogsRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4066,7 +3929,6 @@ def test_list_transfer_logs_field_headers():
         type(client.transport.list_transfer_logs), "__call__"
     ) as call:
         call.return_value = datatransfer.ListTransferLogsResponse()
-
         client.list_transfer_logs(request)

     # Establish that the underlying gRPC stub method was called.
@@ -4082,12 +3944,13 @@ def test_list_transfer_logs_field_headers():
 @pytest.mark.asyncio
 async def test_list_transfer_logs_field_headers_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.ListTransferLogsRequest()
+
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4097,7 +3960,6 @@ async def test_list_transfer_logs_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             datatransfer.ListTransferLogsResponse()
         )
-
         await client.list_transfer_logs(request)

     # Establish that the underlying gRPC stub method was called.
@@ -4111,7 +3973,9 @@ async def test_list_transfer_logs_field_headers_async():


 def test_list_transfer_logs_flattened():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4119,7 +3983,6 @@ def test_list_transfer_logs_flattened():
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = datatransfer.ListTransferLogsResponse()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_transfer_logs(parent="parent_value",)
@@ -4128,12 +3991,13 @@ def test_list_transfer_logs_flattened():
     # request object values.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0].parent == "parent_value"


 def test_list_transfer_logs_flattened_error():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -4146,7 +4010,7 @@ def test_list_transfer_logs_flattened_error():
 @pytest.mark.asyncio
 async def test_list_transfer_logs_flattened_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4167,14 +4031,13 @@ async def test_list_transfer_logs_flattened_async():
     # request object values.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0].parent == "parent_value"


 @pytest.mark.asyncio
 async def test_list_transfer_logs_flattened_error_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -4186,7 +4049,7 @@ async def test_list_transfer_logs_flattened_error_async():


 def test_list_transfer_logs_pager():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+    client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4231,7 +4094,7 @@ def test_list_transfer_logs_pager():


 def test_list_transfer_logs_pages():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,)
+    client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials,)

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4269,7 +4132,7 @@ def test_list_transfer_logs_pages():
 @pytest.mark.asyncio
 async def test_list_transfer_logs_async_pager():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials,
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4315,7 +4178,7 @@ async def test_list_transfer_logs_async_pager():
 @pytest.mark.asyncio
 async def test_list_transfer_logs_async_pages():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials,
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4359,7 +4222,7 @@ def test_check_valid_creds(
     transport: str = "grpc", request_type=datatransfer.CheckValidCredsRequest
 ):
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -4372,19 +4235,15 @@ def test_check_valid_creds(
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = datatransfer.CheckValidCredsResponse(has_valid_creds=True,)
-
         response = client.check_valid_creds(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.CheckValidCredsRequest()

     # Establish that the response is the type that we expect.
-
     assert isinstance(response, datatransfer.CheckValidCredsResponse)
-
     assert response.has_valid_creds is True
@@ -4396,7 +4255,7 @@ def test_check_valid_creds_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4406,7 +4265,6 @@ def test_check_valid_creds_empty_call():
         client.check_valid_creds()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
-
         assert args[0] == datatransfer.CheckValidCredsRequest()


@@ -4415,7 +4273,7 @@ async def test_check_valid_creds_async(
     transport: str = "grpc_asyncio", request_type=datatransfer.CheckValidCredsRequest
 ):
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -4430,18 +4288,15 @@ async def test_check_valid_creds_async(
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             datatransfer.CheckValidCredsResponse(has_valid_creds=True,)
         )
-
         response = await client.check_valid_creds(request)

     # Establish that the underlying gRPC stub method was called.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0] == datatransfer.CheckValidCredsRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, datatransfer.CheckValidCredsResponse)
-
     assert response.has_valid_creds is True
@@ -4451,11 +4306,14 @@ async def test_check_valid_creds_async_from_dict():


 def test_check_valid_creds_field_headers():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.CheckValidCredsRequest()
+
     request.name = "name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4463,7 +4321,6 @@ def test_check_valid_creds_field_headers():
         type(client.transport.check_valid_creds), "__call__"
     ) as call:
         call.return_value = datatransfer.CheckValidCredsResponse()
-
         client.check_valid_creds(request)

     # Establish that the underlying gRPC stub method was called.
@@ -4479,12 +4336,13 @@ def test_check_valid_creds_field_headers():
 @pytest.mark.asyncio
 async def test_check_valid_creds_field_headers_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datatransfer.CheckValidCredsRequest()
+
     request.name = "name/value"

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4494,7 +4352,6 @@ async def test_check_valid_creds_field_headers_async():
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             datatransfer.CheckValidCredsResponse()
         )
-
         await client.check_valid_creds(request)

     # Establish that the underlying gRPC stub method was called.
@@ -4508,7 +4365,9 @@ async def test_check_valid_creds_field_headers_async():


 def test_check_valid_creds_flattened():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4516,7 +4375,6 @@ def test_check_valid_creds_flattened():
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = datatransfer.CheckValidCredsResponse()
-
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.check_valid_creds(name="name_value",)
@@ -4525,12 +4383,13 @@ def test_check_valid_creds_flattened():
     # request object values.
     assert len(call.mock_calls) == 1
     _, args, _ = call.mock_calls[0]
-
     assert args[0].name == "name_value"


 def test_check_valid_creds_flattened_error():
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -4543,7 +4402,7 @@ def test_check_valid_creds_flattened_error():
 @pytest.mark.asyncio
 async def test_check_valid_creds_flattened_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4564,14 +4423,13 @@ async def test_check_valid_creds_flattened_async():
     # request object values.
     assert len(call.mock_calls)
     _, args, _ = call.mock_calls[0]
-
     assert args[0].name == "name_value"


 @pytest.mark.asyncio
 async def test_check_valid_creds_flattened_error_async():
     client = DataTransferServiceAsyncClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )

     # Attempting to call a method with both a request object and flattened
@@ -4585,16 +4443,16 @@ async def test_check_valid_creds_flattened_error_async():


 def test_credentials_transport_error():
     # It is an error to provide credentials and a transport instance.
     transport = transports.DataTransferServiceGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = DataTransferServiceClient(
-            credentials=credentials.AnonymousCredentials(), transport=transport,
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
         )

     # It is an error to provide a credentials file and a transport instance.
     transport = transports.DataTransferServiceGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = DataTransferServiceClient(
@@ -4604,7 +4462,7 @@ def test_credentials_transport_error():

     # It is an error to provide scopes and a transport instance.
     transport = transports.DataTransferServiceGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     with pytest.raises(ValueError):
         client = DataTransferServiceClient(
@@ -4615,7 +4473,7 @@ def test_transport_instance():
     # A client may be instantiated with a custom transport instance.
     transport = transports.DataTransferServiceGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     client = DataTransferServiceClient(transport=transport)
     assert client.transport is transport
@@ -4624,13 +4482,13 @@ def test_transport_get_channel():
     # A client may be instantiated with a custom transport instance.
     transport = transports.DataTransferServiceGrpcTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     channel = transport.grpc_channel
     assert channel

     transport = transports.DataTransferServiceGrpcAsyncIOTransport(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
     )
     channel = transport.grpc_channel
     assert channel
@@ -4645,23 +4503,25 @@ def test_transport_get_channel():
 )
 def test_transport_adc(transport_class):
     # Test default credentials are used if not provided.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport_class()
         adc.assert_called_once()


 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
-    client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),)
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
     assert isinstance(client.transport, transports.DataTransferServiceGrpcTransport,)


 def test_data_transfer_service_base_transport_error():
     # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(exceptions.DuplicateCredentialArgs):
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
         transport = transports.DataTransferServiceTransport(
-            credentials=credentials.AnonymousCredentials(),
+            credentials=ga_credentials.AnonymousCredentials(),
             credentials_file="credentials.json",
         )
@@ -4673,7 +4533,7 @@ def test_data_transfer_service_base_transport():
     ) as Transport:
         Transport.return_value = None
         transport = transports.DataTransferServiceTransport(
-            credentials=credentials.AnonymousCredentials(),
+            credentials=ga_credentials.AnonymousCredentials(),
         )

     # Every method on the transport should just blindly
@@ -4699,15 +4559,37 @@ def test_data_transfer_service_base_transport():
         getattr(transport, method)(request=object())


+@requires_google_auth_gte_1_25_0
 def test_data_transfer_service_base_transport_with_credentials_file():
     # Instantiate the base transport with a credentials file
     with mock.patch.object(
-        auth, "load_credentials_from_file"
+        google.auth, "load_credentials_from_file", autospec=True
     ) as load_creds, mock.patch(
         "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages"
     ) as Transport:
         Transport.return_value = None
-        load_creds.return_value = (credentials.AnonymousCredentials(), None)
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DataTransferServiceTransport(
+            credentials_file="credentials.json", quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_data_transfer_service_base_transport_with_credentials_file_old_google_auth():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.DataTransferServiceTransport(
             credentials_file="credentials.json", quota_project_id="octopus",
         )
@@ -4720,19 +4602,33 @@ def test_data_transfer_service_base_transport_with_credentials_file():

 def test_data_transfer_service_base_transport_with_adc():
     # Test the default credentials are used if credentials and credentials_file are None.
-    with mock.patch.object(auth, "default") as adc, mock.patch(
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
         "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages"
     ) as Transport:
         Transport.return_value = None
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.DataTransferServiceTransport()
         adc.assert_called_once()


+@requires_google_auth_gte_1_25_0
 def test_data_transfer_service_auth_adc():
     # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        DataTransferServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_data_transfer_service_auth_adc_old_google_auth():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
         DataTransferServiceClient()
         adc.assert_called_once_with(
             scopes=("https://www.googleapis.com/auth/cloud-platform",),
@@ -4740,20 +4636,156 @@ def test_data_transfer_service_auth_adc():
         )


-def test_data_transfer_service_transport_auth_adc():
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataTransferServiceGrpcTransport,
+        transports.DataTransferServiceGrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_gte_1_25_0
+def test_data_transfer_service_transport_auth_adc(transport_class):
     # If credentials and host are not provided, the transport class should use
     # ADC credentials.
-    with mock.patch.object(auth, "default") as adc:
-        adc.return_value = (credentials.AnonymousCredentials(), None)
-        transports.DataTransferServiceGrpcTransport(
-            host="squid.clam.whelk", quota_project_id="octopus"
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
         )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataTransferServiceGrpcTransport,
+        transports.DataTransferServiceGrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_lt_1_25_0
+def test_data_transfer_service_transport_auth_adc_old_google_auth(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus")
         adc.assert_called_once_with(
             scopes=("https://www.googleapis.com/auth/cloud-platform",),
             quota_project_id="octopus",
         )


+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataTransferServiceGrpcTransport, grpc_helpers),
+        (transports.DataTransferServiceGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_gte_1_26_0
+def test_data_transfer_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "bigquerydatatransfer.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="bigquerydatatransfer.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataTransferServiceGrpcTransport, grpc_helpers),
+        (transports.DataTransferServiceGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_lt_1_26_0
+def test_data_transfer_service_transport_create_channel_old_api_core(
+    transport_class, grpc_helpers
+):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus")
+
+        create_channel.assert_called_with(
+            "bigquerydatatransfer.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DataTransferServiceGrpcTransport, grpc_helpers),
+        (transports.DataTransferServiceGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+@requires_api_core_lt_1_26_0
+def test_data_transfer_service_transport_create_channel_user_scopes(
+    transport_class, grpc_helpers
+):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "bigquerydatatransfer.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            scopes=["1", "2"],
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
 @pytest.mark.parametrize(
     "transport_class",
     [
@@ -4764,7 +4796,7 @@ def test_data_transfer_service_transport_auth_adc():
 def test_data_transfer_service_grpc_transport_client_cert_source_for_mtls(
     transport_class,
 ):
-    cred = credentials.AnonymousCredentials()
+    cred = ga_credentials.AnonymousCredentials()

     # Check ssl_channel_credentials is used if provided.
     with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
@@ -4803,7 +4835,7 @@ def test_data_transfer_service_grpc_transport_client_cert_source_for_mtls(

 def test_data_transfer_service_host_no_port():
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
         client_options=client_options.ClientOptions(
             api_endpoint="bigquerydatatransfer.googleapis.com"
         ),
@@ -4813,7 +4845,7 @@ def test_data_transfer_service_host_no_port():

 def test_data_transfer_service_host_with_port():
     client = DataTransferServiceClient(
-        credentials=credentials.AnonymousCredentials(),
+        credentials=ga_credentials.AnonymousCredentials(),
         client_options=client_options.ClientOptions(
             api_endpoint="bigquerydatatransfer.googleapis.com:8000"
         ),
@@ -4869,9 +4901,9 @@ def test_data_transfer_service_transport_channel_mtls_with_client_cert_source(
         mock_grpc_channel = mock.Mock()
         grpc_create_channel.return_value = mock_grpc_channel

-        cred = credentials.AnonymousCredentials()
+        cred = ga_credentials.AnonymousCredentials()
         with pytest.warns(DeprecationWarning):
-            with mock.patch.object(auth, "default") as adc:
+            with mock.patch.object(google.auth, "default") as adc:
                 adc.return_value = (cred, None)
                 transport = transport_class(
                     host="squid.clam.whelk",
@@ -4948,7 +4980,6 @@ def test_data_transfer_service_transport_channel_mtls_with_adc(transport_class):
 def test_data_source_path():
     project = "squid"
     data_source = "clam"
-
     expected = "projects/{project}/dataSources/{data_source}".format(
         project=project, data_source=data_source,
     )
@@ -4972,7 +5003,6 @@ def test_run_path():
     project = "oyster"
     transfer_config = "nudibranch"
     run = "cuttlefish"
-
     expected = "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format(
         project=project, transfer_config=transfer_config, run=run,
    )
@@ -4996,7 +5026,6 @@ def test_parse_run_path():
 def test_transfer_config_path():
     project = "scallop"
     transfer_config = "abalone"
-
     expected = "projects/{project}/transferConfigs/{transfer_config}".format(
         project=project, transfer_config=transfer_config,
     )
@@ -5018,7 +5047,6 @@ def test_parse_transfer_config_path():

 def test_common_billing_account_path():
     billing_account = "whelk"
-
     expected = "billingAccounts/{billing_account}".format(
         billing_account=billing_account,
     )
@@ -5039,7 +5067,6 @@ def test_parse_common_billing_account_path():

 def test_common_folder_path():
     folder = "oyster"
-
     expected = "folders/{folder}".format(folder=folder,)
     actual = DataTransferServiceClient.common_folder_path(folder)
     assert expected == actual
@@ -5058,7 +5085,6 @@ def test_parse_common_folder_path():

 def test_common_organization_path():
     organization = "cuttlefish"
-
     expected = "organizations/{organization}".format(organization=organization,)
     actual = DataTransferServiceClient.common_organization_path(organization)
     assert expected == actual
@@ -5077,7 +5103,6 @@ def test_parse_common_organization_path():

 def test_common_project_path():
     project = "winkle"
-
     expected = "projects/{project}".format(project=project,)
     actual = DataTransferServiceClient.common_project_path(project)
     assert expected == actual
@@ -5097,7 +5122,6 @@ def test_parse_common_project_path():
 def test_common_location_path():
     project = "scallop"
     location = "abalone"
-
     expected = "projects/{project}/locations/{location}".format(
         project=project, location=location,
     )
@@ -5124,7 +5148,7 @@ def test_client_withDEFAULT_CLIENT_INFO():
         transports.DataTransferServiceTransport, "_prep_wrapped_messages"
     ) as prep:
         client = DataTransferServiceClient(
-            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
@@ -5133,6 +5157,6 @@ def test_client_withDEFAULT_CLIENT_INFO():
     ) as prep:
         transport_class = DataTransferServiceClient.get_transport_class()
         transport = transport_class(
-            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
         )
         prep.assert_called_once_with(client_info)