From 8199bf62502ff8f809c26d3f17705399775ae320 Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Thu, 3 Sep 2020 06:10:11 -0700 Subject: [PATCH 1/3] changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. --- .../services/data_catalog/async_client.py | 76 ++++--- .../services/data_catalog/client.py | 13 +- .../services/data_catalog/transports/base.py | 76 +++---- .../services/data_catalog/transports/grpc.py | 16 +- .../data_catalog/transports/grpc_asyncio.py | 10 +- google/cloud/datacatalog_v1beta1/__init__.py | 4 +- .../services/data_catalog/async_client.py | 94 ++++---- .../services/data_catalog/client.py | 13 +- .../services/data_catalog/transports/base.py | 94 ++++---- .../services/data_catalog/transports/grpc.py | 16 +- .../data_catalog/transports/grpc_asyncio.py | 10 +- .../policy_tag_manager/async_client.py | 42 ++-- .../services/policy_tag_manager/client.py | 13 +- .../policy_tag_manager/transports/base.py | 42 ++-- .../policy_tag_manager/transports/grpc.py | 16 +- .../transports/grpc_asyncio.py | 10 +- .../async_client.py | 16 +- .../client.py | 13 +- .../transports/base.py | 20 +- .../transports/grpc.py | 16 +- .../transports/grpc_asyncio.py | 10 +- noxfile.py | 2 +- samples/snippets/README.rst | 24 +- synth.metadata | 169 +++++++++++++- .../gapic/datacatalog_v1/test_data_catalog.py | 213 ++++++++++-------- .../datacatalog_v1beta1/test_data_catalog.py | 187 ++++++++------- .../test_policy_tag_manager.py | 103 ++++++--- .../test_policy_tag_manager_serialization.py | 32 +++ 28 files changed, 888 insertions(+), 462 deletions(-) diff --git a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 84dac12f..88de393c 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ 
-41,7 +41,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from .transports.base import DataCatalogTransport +from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport from .client import DataCatalogClient @@ -56,16 +56,16 @@ class DataCatalogAsyncClient: DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT - tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) - - tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + tag_path = staticmethod(DataCatalogClient.tag_path) entry_path = staticmethod(DataCatalogClient.entry_path) - tag_path = staticmethod(DataCatalogClient.tag_path) - entry_group_path = staticmethod(DataCatalogClient.entry_group_path) + tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + + tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) + from_service_account_file = DataCatalogClient.from_service_account_file from_service_account_json = from_service_account_file @@ -79,6 +79,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, DataCatalogTransport] = "grpc_asyncio", client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the data catalog client. 
@@ -111,7 +112,10 @@ def __init__( """ self._client = DataCatalogClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, ) async def search_catalog( @@ -219,7 +223,7 @@ async def search_catalog( predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -338,7 +342,7 @@ async def create_entry_group( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_entry_group, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -425,7 +429,7 @@ async def get_entry_group( predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -511,7 +515,7 @@ async def update_entry_group( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_entry_group, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -583,7 +587,7 @@ async def delete_entry_group( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_entry_group, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -665,7 +669,7 @@ async def list_entry_groups( predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -782,7 +786,7 @@ async def create_entry( rpc = gapic_v1.method_async.wrap_method( 
self._client._transport.create_entry, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -899,7 +903,7 @@ async def update_entry( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_entry, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -973,7 +977,7 @@ async def delete_entry( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_entry, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1059,7 +1063,7 @@ async def get_entry( predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1128,7 +1132,7 @@ async def lookup_entry( predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. 
@@ -1204,7 +1208,7 @@ async def list_entries( predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1314,7 +1318,7 @@ async def create_tag_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_tag_template, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1393,7 +1397,7 @@ async def get_tag_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_tag_template, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1495,7 +1499,7 @@ async def update_tag_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_tag_template, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1579,7 +1583,7 @@ async def delete_tag_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_tag_template, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1682,7 +1686,7 @@ async def create_tag_template_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1789,7 +1793,7 @@ async def update_tag_template_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided 
within the metadata header; @@ -1874,7 +1878,7 @@ async def rename_tag_template_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.rename_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1957,7 +1961,7 @@ async def delete_tag_template_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2051,7 +2055,7 @@ async def create_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2138,7 +2142,7 @@ async def update_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2204,7 +2208,7 @@ async def delete_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2290,7 +2294,7 @@ async def list_tags( predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2454,7 +2458,7 @@ async def set_iam_policy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.set_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ 
-2622,7 +2626,7 @@ async def get_iam_policy( predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2687,7 +2691,7 @@ async def test_iam_permissions( rpc = gapic_v1.method_async.wrap_method( self._client._transport.test_iam_permissions, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2704,13 +2708,13 @@ async def test_iam_permissions( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("DataCatalogAsyncClient",) diff --git a/google/cloud/datacatalog_v1/services/data_catalog/client.py b/google/cloud/datacatalog_v1/services/data_catalog/client.py index b0e61bf2..ab11c91f 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -43,7 +43,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from .transports.base import DataCatalogTransport +from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DataCatalogGrpcTransport from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport @@ -232,6 +232,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, DataCatalogTransport] = None, client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the data catalog client. 
@@ -257,6 +258,11 @@ def __init__( (2) The ``client_cert_source`` property is used to provide client SSL credentials for mutual TLS transport. If not provided, the default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -314,6 +320,7 @@ def __init__( api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def search_catalog( @@ -2901,13 +2908,13 @@ def test_iam_permissions( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("DataCatalogClient",) diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py index 326b640d..7a2f3159 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py @@ -19,7 +19,7 @@ import typing import pkg_resources -from google import auth +from google import auth # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -33,13 +33,13 @@ try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class DataCatalogTransport(abc.ABC): @@ -55,6 +55,7 @@ def __init__( credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. @@ -72,6 +73,11 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -99,9 +105,9 @@ def __init__( self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages() + self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self): + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.search_catalog: gapic_v1.method.wrap_method( @@ -113,10 +119,10 @@ def _prep_wrapped_messages(self): predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.create_entry_group: gapic_v1.method.wrap_method( - self.create_entry_group, default_timeout=None, client_info=_client_info, + self.create_entry_group, default_timeout=None, client_info=client_info, ), self.get_entry_group: gapic_v1.method.wrap_method( self.get_entry_group, @@ -127,13 +133,13 @@ def _prep_wrapped_messages(self): predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.update_entry_group: gapic_v1.method.wrap_method( - self.update_entry_group, default_timeout=None, client_info=_client_info, + self.update_entry_group, default_timeout=None, client_info=client_info, ), self.delete_entry_group: gapic_v1.method.wrap_method( - self.delete_entry_group, default_timeout=None, client_info=_client_info, + self.delete_entry_group, default_timeout=None, client_info=client_info, ), self.list_entry_groups: gapic_v1.method.wrap_method( self.list_entry_groups, @@ -144,16 +150,16 @@ def _prep_wrapped_messages(self): predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.create_entry: gapic_v1.method.wrap_method( - self.create_entry, default_timeout=None, client_info=_client_info, + self.create_entry, default_timeout=None, client_info=client_info, ), self.update_entry: gapic_v1.method.wrap_method( - self.update_entry, default_timeout=None, client_info=_client_info, + self.update_entry, default_timeout=None, client_info=client_info, ), self.delete_entry: gapic_v1.method.wrap_method( - self.delete_entry, default_timeout=None, client_info=_client_info, + self.delete_entry, 
default_timeout=None, client_info=client_info, ), self.get_entry: gapic_v1.method.wrap_method( self.get_entry, @@ -164,7 +170,7 @@ def _prep_wrapped_messages(self): predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.lookup_entry: gapic_v1.method.wrap_method( self.lookup_entry, @@ -175,7 +181,7 @@ def _prep_wrapped_messages(self): predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.list_entries: gapic_v1.method.wrap_method( self.list_entries, @@ -186,54 +192,48 @@ def _prep_wrapped_messages(self): predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.create_tag_template: gapic_v1.method.wrap_method( - self.create_tag_template, - default_timeout=None, - client_info=_client_info, + self.create_tag_template, default_timeout=None, client_info=client_info, ), self.get_tag_template: gapic_v1.method.wrap_method( - self.get_tag_template, default_timeout=None, client_info=_client_info, + self.get_tag_template, default_timeout=None, client_info=client_info, ), self.update_tag_template: gapic_v1.method.wrap_method( - self.update_tag_template, - default_timeout=None, - client_info=_client_info, + self.update_tag_template, default_timeout=None, client_info=client_info, ), self.delete_tag_template: gapic_v1.method.wrap_method( - self.delete_tag_template, - default_timeout=None, - client_info=_client_info, + self.delete_tag_template, default_timeout=None, client_info=client_info, ), self.create_tag_template_field: gapic_v1.method.wrap_method( self.create_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=client_info, ), self.update_tag_template_field: gapic_v1.method.wrap_method( self.update_tag_template_field, default_timeout=None, - 
client_info=_client_info, + client_info=client_info, ), self.rename_tag_template_field: gapic_v1.method.wrap_method( self.rename_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=client_info, ), self.delete_tag_template_field: gapic_v1.method.wrap_method( self.delete_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=client_info, ), self.create_tag: gapic_v1.method.wrap_method( - self.create_tag, default_timeout=None, client_info=_client_info, + self.create_tag, default_timeout=None, client_info=client_info, ), self.update_tag: gapic_v1.method.wrap_method( - self.update_tag, default_timeout=None, client_info=_client_info, + self.update_tag, default_timeout=None, client_info=client_info, ), self.delete_tag: gapic_v1.method.wrap_method( - self.delete_tag, default_timeout=None, client_info=_client_info, + self.delete_tag, default_timeout=None, client_info=client_info, ), self.list_tags: gapic_v1.method.wrap_method( self.list_tags, @@ -244,10 +244,10 @@ def _prep_wrapped_messages(self): predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, default_timeout=None, client_info=_client_info, + self.set_iam_policy, default_timeout=None, client_info=client_info, ), self.get_iam_policy: gapic_v1.method.wrap_method( self.get_iam_policy, @@ -258,12 +258,12 @@ def _prep_wrapped_messages(self): predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.test_iam_permissions: gapic_v1.method.wrap_method( self.test_iam_permissions, default_timeout=None, - client_info=_client_info, + client_info=client_info, ), } diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py 
b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py index 9de2ca50..3fd33e16 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py @@ -18,6 +18,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -31,7 +32,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import DataCatalogTransport +from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO class DataCatalogGrpcTransport(DataCatalogTransport): @@ -60,7 +61,8 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -89,6 +91,11 @@ def __init__( is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -144,6 +151,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) @classmethod @@ -154,7 +162,7 @@ def create_channel( credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, - **kwargs + **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -188,7 +196,7 @@ def create_channel( credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, - **kwargs + **kwargs, ) @property diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py index 24fdb5c9..8851137f 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py @@ -17,6 +17,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -30,7 +31,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import DataCatalogTransport +from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .grpc import DataCatalogGrpcTransport @@ -103,6 +104,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -132,6 +134,11 @@ def __init__( is None. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -180,6 +187,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) self._stubs = {} diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index be0bdd8e..16534418 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,7 +103,6 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", - "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -141,6 +140,7 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", + "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "PolicyTagManagerSerializationClient", + "DataCatalogClient", ) diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py index ee21855f..9ed89045 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py @@ -41,7 +41,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from .transports.base import DataCatalogTransport +from .transports.base import 
DataCatalogTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport from .client import DataCatalogClient @@ -56,15 +56,15 @@ class DataCatalogAsyncClient: DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT - tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + tag_path = staticmethod(DataCatalogClient.tag_path) entry_path = staticmethod(DataCatalogClient.entry_path) - entry_group_path = staticmethod(DataCatalogClient.entry_group_path) - tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) - tag_path = staticmethod(DataCatalogClient.tag_path) + entry_group_path = staticmethod(DataCatalogClient.entry_group_path) + + tag_template_path = staticmethod(DataCatalogClient.tag_template_path) from_service_account_file = DataCatalogClient.from_service_account_file from_service_account_json = from_service_account_file @@ -79,6 +79,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, DataCatalogTransport] = "grpc_asyncio", client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the data catalog client. @@ -111,7 +112,10 @@ def __init__( """ self._client = DataCatalogClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, ) async def search_catalog( @@ -213,7 +217,7 @@ async def search_catalog( rpc = gapic_v1.method_async.wrap_method( self._client._transport.search_catalog, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. 
@@ -320,7 +324,7 @@ async def create_entry_group( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_entry_group, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -407,7 +411,7 @@ async def update_entry_group( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_entry_group, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -495,11 +499,11 @@ async def get_entry_group( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -573,11 +577,11 @@ async def delete_entry_group( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -653,7 +657,7 @@ async def list_entry_groups( rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_entry_groups, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -770,7 +774,7 @@ async def create_entry( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_entry, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -887,7 +891,7 @@ async def update_entry( rpc = 
gapic_v1.method_async.wrap_method( self._client._transport.update_entry, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -965,11 +969,11 @@ async def delete_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1053,11 +1057,11 @@ async def get_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1124,11 +1128,11 @@ async def lookup_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. 
@@ -1198,7 +1202,7 @@ async def list_entries( rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_entries, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1308,7 +1312,7 @@ async def create_tag_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_tag_template, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1391,11 +1395,11 @@ async def get_tag_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1497,7 +1501,7 @@ async def update_tag_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_tag_template, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1585,11 +1589,11 @@ async def delete_tag_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1692,7 +1696,7 @@ async def create_tag_template_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1799,7 +1803,7 @@ async def 
update_tag_template_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1884,7 +1888,7 @@ async def rename_tag_template_field( rpc = gapic_v1.method_async.wrap_method( self._client._transport.rename_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1971,11 +1975,11 @@ async def delete_tag_template_field( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2070,7 +2074,7 @@ async def create_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2157,7 +2161,7 @@ async def update_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2227,11 +2231,11 @@ async def delete_tag( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2316,11 +2320,11 @@ async def list_tags( maximum=60.0, multiplier=1.3, 
predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2484,7 +2488,7 @@ async def set_iam_policy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.set_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2646,7 +2650,7 @@ async def get_iam_policy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2711,7 +2715,7 @@ async def test_iam_permissions( rpc = gapic_v1.method_async.wrap_method( self._client._transport.test_iam_permissions, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2728,13 +2732,13 @@ async def test_iam_permissions( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("DataCatalogAsyncClient",) diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index 08b4f5b7..6fcee78d 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -43,7 +43,7 @@ from google.iam.v1 import policy_pb2 as policy 
# type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from .transports.base import DataCatalogTransport +from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DataCatalogGrpcTransport from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport @@ -232,6 +232,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, DataCatalogTransport] = None, client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the data catalog client. @@ -257,6 +258,11 @@ def __init__( (2) The ``client_cert_source`` property is used to provide client SSL credentials for mutual TLS transport. If not provided, the default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -314,6 +320,7 @@ def __init__( api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def search_catalog( @@ -2893,13 +2900,13 @@ def test_iam_permissions( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("DataCatalogClient",) diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py index fac99233..097d283d 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py @@ -19,7 +19,7 @@ import typing import pkg_resources -from google import auth +from google import auth # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -33,13 +33,13 @@ try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class DataCatalogTransport(abc.ABC): @@ -55,6 +55,7 @@ def __init__( credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, quota_project_id: typing.Optional[str] = None, + client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. @@ -72,6 +73,11 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -99,19 +105,19 @@ def __init__( self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages() + self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self): + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.search_catalog: gapic_v1.method.wrap_method( - self.search_catalog, default_timeout=None, client_info=_client_info, + self.search_catalog, default_timeout=None, client_info=client_info, ), self.create_entry_group: gapic_v1.method.wrap_method( - self.create_entry_group, default_timeout=None, client_info=_client_info, + self.create_entry_group, default_timeout=None, client_info=client_info, ), self.update_entry_group: gapic_v1.method.wrap_method( - self.update_entry_group, default_timeout=None, client_info=_client_info, + self.update_entry_group, default_timeout=None, client_info=client_info, ), self.get_entry_group: gapic_v1.method.wrap_method( self.get_entry_group, @@ -120,11 +126,11 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + 
client_info=client_info, ), self.delete_entry_group: gapic_v1.method.wrap_method( self.delete_entry_group, @@ -133,20 +139,20 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.list_entry_groups: gapic_v1.method.wrap_method( - self.list_entry_groups, default_timeout=None, client_info=_client_info, + self.list_entry_groups, default_timeout=None, client_info=client_info, ), self.create_entry: gapic_v1.method.wrap_method( - self.create_entry, default_timeout=None, client_info=_client_info, + self.create_entry, default_timeout=None, client_info=client_info, ), self.update_entry: gapic_v1.method.wrap_method( - self.update_entry, default_timeout=None, client_info=_client_info, + self.update_entry, default_timeout=None, client_info=client_info, ), self.delete_entry: gapic_v1.method.wrap_method( self.delete_entry, @@ -155,11 +161,11 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.get_entry: gapic_v1.method.wrap_method( self.get_entry, @@ -168,11 +174,11 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.lookup_entry: gapic_v1.method.wrap_method( self.lookup_entry, @@ -181,19 +187,17 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, 
predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.list_entries: gapic_v1.method.wrap_method( - self.list_entries, default_timeout=None, client_info=_client_info, + self.list_entries, default_timeout=None, client_info=client_info, ), self.create_tag_template: gapic_v1.method.wrap_method( - self.create_tag_template, - default_timeout=None, - client_info=_client_info, + self.create_tag_template, default_timeout=None, client_info=client_info, ), self.get_tag_template: gapic_v1.method.wrap_method( self.get_tag_template, @@ -202,16 +206,14 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.update_tag_template: gapic_v1.method.wrap_method( - self.update_tag_template, - default_timeout=None, - client_info=_client_info, + self.update_tag_template, default_timeout=None, client_info=client_info, ), self.delete_tag_template: gapic_v1.method.wrap_method( self.delete_tag_template, @@ -220,26 +222,26 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.create_tag_template_field: gapic_v1.method.wrap_method( self.create_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=client_info, ), self.update_tag_template_field: gapic_v1.method.wrap_method( self.update_tag_template_field, default_timeout=None, - client_info=_client_info, 
+ client_info=client_info, ), self.rename_tag_template_field: gapic_v1.method.wrap_method( self.rename_tag_template_field, default_timeout=None, - client_info=_client_info, + client_info=client_info, ), self.delete_tag_template_field: gapic_v1.method.wrap_method( self.delete_tag_template_field, @@ -248,17 +250,17 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.create_tag: gapic_v1.method.wrap_method( - self.create_tag, default_timeout=None, client_info=_client_info, + self.create_tag, default_timeout=None, client_info=client_info, ), self.update_tag: gapic_v1.method.wrap_method( - self.update_tag, default_timeout=None, client_info=_client_info, + self.update_tag, default_timeout=None, client_info=client_info, ), self.delete_tag: gapic_v1.method.wrap_method( self.delete_tag, @@ -267,11 +269,11 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.list_tags: gapic_v1.method.wrap_method( self.list_tags, @@ -280,22 +282,22 @@ def _prep_wrapped_messages(self): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, ), ), default_timeout=60.0, - client_info=_client_info, + client_info=client_info, ), self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, default_timeout=None, client_info=_client_info, + self.set_iam_policy, default_timeout=None, client_info=client_info, ), self.get_iam_policy: 
gapic_v1.method.wrap_method( - self.get_iam_policy, default_timeout=None, client_info=_client_info, + self.get_iam_policy, default_timeout=None, client_info=client_info, ), self.test_iam_permissions: gapic_v1.method.wrap_method( self.test_iam_permissions, default_timeout=None, - client_info=_client_info, + client_info=client_info, ), } diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py index 1b96a954..cbd4a26a 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py @@ -18,6 +18,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -31,7 +32,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import DataCatalogTransport +from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO class DataCatalogGrpcTransport(DataCatalogTransport): @@ -60,7 +61,8 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -89,6 +91,11 @@ def __init__( is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -144,6 +151,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) @classmethod @@ -154,7 +162,7 @@ def create_channel( credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, - **kwargs + **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -188,7 +196,7 @@ def create_channel( credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, - **kwargs + **kwargs, ) @property diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py index 1d7f80fd..df77b9b0 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py @@ -17,6 +17,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -30,7 +31,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import DataCatalogTransport +from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO from .grpc import DataCatalogGrpcTransport @@ -103,6 +104,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the 
transport. @@ -132,6 +134,11 @@ def __init__( is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -180,6 +187,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) self._stubs = {} diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py index de2eaeea..3df25722 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py @@ -33,7 +33,7 @@ from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore -from .transports.base import PolicyTagManagerTransport +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport from .client import PolicyTagManagerClient @@ -48,10 +48,10 @@ class PolicyTagManagerAsyncClient: DEFAULT_ENDPOINT = PolicyTagManagerClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT - taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) - policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path) + taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) + from_service_account_file = PolicyTagManagerClient.from_service_account_file from_service_account_json = from_service_account_file @@ -65,6 +65,7 @@ def 
__init__( credentials: credentials.Credentials = None, transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio", client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the policy tag manager client. @@ -97,7 +98,10 @@ def __init__( """ self._client = PolicyTagManagerClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, ) async def create_taxonomy( @@ -170,7 +174,7 @@ async def create_taxonomy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_taxonomy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -238,7 +242,7 @@ async def delete_taxonomy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_taxonomy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -313,7 +317,7 @@ async def update_taxonomy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_taxonomy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -390,7 +394,7 @@ async def list_taxonomies( rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_taxonomies, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -472,7 +476,7 @@ async def get_taxonomy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_taxonomy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -558,7 +562,7 @@ 
async def create_policy_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_policy_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -625,7 +629,7 @@ async def delete_policy_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_policy_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -702,7 +706,7 @@ async def update_policy_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_policy_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -778,7 +782,7 @@ async def list_policy_tags( rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_policy_tags, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -861,7 +865,7 @@ async def get_policy_tag( rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_policy_tag, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -978,7 +982,7 @@ async def get_iam_policy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1095,7 +1099,7 @@ async def set_iam_policy( rpc = gapic_v1.method_async.wrap_method( self._client._transport.set_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1148,7 +1152,7 @@ async def 
test_iam_permissions( rpc = gapic_v1.method_async.wrap_method( self._client._transport.test_iam_permissions, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1165,13 +1169,13 @@ async def test_iam_permissions( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("PolicyTagManagerAsyncClient",) diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index ac3eec4d..46a8a602 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -35,7 +35,7 @@ from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore -from .transports.base import PolicyTagManagerTransport +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PolicyTagManagerGrpcTransport from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport @@ -175,6 +175,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, PolicyTagManagerTransport] = None, client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the policy tag manager client. @@ -200,6 +201,11 @@ def __init__( (2) The ``client_cert_source`` property is used to provide client SSL credentials for mutual TLS transport. If not provided, the default SSL credentials will be used if present. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -257,6 +263,7 @@ def __init__( api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def create_taxonomy( @@ -1332,13 +1339,13 @@ def test_iam_permissions( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("PolicyTagManagerClient",) diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py index abca4532..8d5b5e7c 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py @@ -19,7 +19,7 @@ import typing import pkg_resources -from google import auth +from google import auth # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -32,13 +32,13 @@ try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = 
gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class PolicyTagManagerTransport(abc.ABC): @@ -54,6 +54,7 @@ def __init__( credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. @@ -71,6 +72,11 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -98,51 +104,51 @@ def __init__( self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages() + self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self): + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.create_taxonomy: gapic_v1.method.wrap_method( - self.create_taxonomy, default_timeout=None, client_info=_client_info, + self.create_taxonomy, default_timeout=None, client_info=client_info, ), self.delete_taxonomy: gapic_v1.method.wrap_method( - self.delete_taxonomy, default_timeout=None, client_info=_client_info, + self.delete_taxonomy, default_timeout=None, client_info=client_info, ), self.update_taxonomy: gapic_v1.method.wrap_method( - self.update_taxonomy, default_timeout=None, client_info=_client_info, + self.update_taxonomy, default_timeout=None, client_info=client_info, ), self.list_taxonomies: gapic_v1.method.wrap_method( - self.list_taxonomies, default_timeout=None, client_info=_client_info, + self.list_taxonomies, default_timeout=None, client_info=client_info, ), self.get_taxonomy: gapic_v1.method.wrap_method( - self.get_taxonomy, default_timeout=None, client_info=_client_info, + self.get_taxonomy, default_timeout=None, client_info=client_info, ), self.create_policy_tag: gapic_v1.method.wrap_method( - self.create_policy_tag, default_timeout=None, client_info=_client_info, + self.create_policy_tag, default_timeout=None, client_info=client_info, ), self.delete_policy_tag: gapic_v1.method.wrap_method( - self.delete_policy_tag, default_timeout=None, client_info=_client_info, + self.delete_policy_tag, default_timeout=None, client_info=client_info, ), self.update_policy_tag: gapic_v1.method.wrap_method( - self.update_policy_tag, default_timeout=None, client_info=_client_info, + self.update_policy_tag, default_timeout=None, client_info=client_info, ), self.list_policy_tags: gapic_v1.method.wrap_method( - self.list_policy_tags, default_timeout=None, client_info=_client_info, + self.list_policy_tags, default_timeout=None, client_info=client_info, ), self.get_policy_tag: gapic_v1.method.wrap_method( - self.get_policy_tag, default_timeout=None, client_info=_client_info, + self.get_policy_tag, default_timeout=None, 
client_info=client_info, ), self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, default_timeout=None, client_info=_client_info, + self.get_iam_policy, default_timeout=None, client_info=client_info, ), self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, default_timeout=None, client_info=_client_info, + self.set_iam_policy, default_timeout=None, client_info=client_info, ), self.test_iam_permissions: gapic_v1.method.wrap_method( self.test_iam_permissions, default_timeout=None, - client_info=_client_info, + client_info=client_info, ), } diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py index d7fc35f0..d14dd424 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py @@ -18,6 +18,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -30,7 +31,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import PolicyTagManagerTransport +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO class PolicyTagManagerGrpcTransport(PolicyTagManagerTransport): @@ -59,7 +60,8 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. 
@@ -88,6 +90,11 @@ def __init__( is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -143,6 +150,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) @classmethod @@ -153,7 +161,7 @@ def create_channel( credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, - **kwargs + **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -187,7 +195,7 @@ def create_channel( credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, - **kwargs + **kwargs, ) @property diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py index 217f0a87..d75d30bf 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -17,6 +17,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -29,7 +30,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import PolicyTagManagerTransport +from .base 
import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO from .grpc import PolicyTagManagerGrpcTransport @@ -102,6 +103,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -131,6 +133,11 @@ def __init__( is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -179,6 +186,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) self._stubs = {} diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py index 474cc182..5e24730a 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py @@ -31,7 +31,7 @@ from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from .transports.base import PolicyTagManagerSerializationTransport +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport from .client import PolicyTagManagerSerializationClient @@ -63,6 +63,7 @@ def __init__( credentials: credentials.Credentials = None, 
transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio", client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the policy tag manager serialization client. @@ -95,7 +96,10 @@ def __init__( """ self._client = PolicyTagManagerSerializationClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, ) async def import_taxonomies( @@ -138,7 +142,7 @@ async def import_taxonomies( rpc = gapic_v1.method_async.wrap_method( self._client._transport.import_taxonomies, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -193,7 +197,7 @@ async def export_taxonomies( rpc = gapic_v1.method_async.wrap_method( self._client._transport.export_taxonomies, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -210,13 +214,13 @@ async def export_taxonomies( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("PolicyTagManagerSerializationAsyncClient",) diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py index 445d151d..d5d77322 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ 
-33,7 +33,7 @@ from google.cloud.datacatalog_v1beta1.types import policytagmanager from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from .transports.base import PolicyTagManagerSerializationTransport +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PolicyTagManagerSerializationGrpcTransport from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport @@ -143,6 +143,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, PolicyTagManagerSerializationTransport] = None, client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the policy tag manager serialization client. @@ -168,6 +169,11 @@ def __init__( (2) The ``client_cert_source`` property is used to provide client SSL credentials for mutual TLS transport. If not provided, the default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -225,6 +231,7 @@ def __init__( api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def import_taxonomies( @@ -345,13 +352,13 @@ def export_taxonomies( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("PolicyTagManagerSerializationClient",) diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py index 26360d93..5f5da515 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py @@ -19,7 +19,7 @@ import typing import pkg_resources -from google import auth +from google import auth # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -29,13 +29,13 @@ try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-datacatalog", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class PolicyTagManagerSerializationTransport(abc.ABC): @@ -51,6 +51,7 @@ def __init__( credentials_file: typing.Optional[str] = None, scopes: 
typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. @@ -68,6 +69,11 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -95,16 +101,16 @@ def __init__( self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages() + self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self): + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.import_taxonomies: gapic_v1.method.wrap_method( - self.import_taxonomies, default_timeout=None, client_info=_client_info, + self.import_taxonomies, default_timeout=None, client_info=client_info, ), self.export_taxonomies: gapic_v1.method.wrap_method( - self.export_taxonomies, default_timeout=None, client_info=_client_info, + self.export_taxonomies, default_timeout=None, client_info=client_info, ), } diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py index d2d74539..c724aa0b 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py @@ -18,6 +18,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -27,7 +28,7 @@ from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from .base import PolicyTagManagerSerializationTransport +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO class PolicyTagManagerSerializationGrpcTransport( @@ -59,7 +60,8 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -88,6 +90,11 @@ def __init__( is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -143,6 +150,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) @classmethod @@ -153,7 +161,7 @@ def create_channel( credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, - **kwargs + **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -187,7 +195,7 @@ def create_channel( credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, - **kwargs + **kwargs, ) @property diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py index 8e47b76f..243cc091 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -17,6 +17,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -26,7 +27,7 @@ from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from .base import PolicyTagManagerSerializationTransport +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO from .grpc import 
PolicyTagManagerSerializationGrpcTransport @@ -102,6 +103,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -131,6 +133,11 @@ def __init__( is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -179,6 +186,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) self._stubs = {} diff --git a/noxfile.py b/noxfile.py index f2524069..14e819be 100644 --- a/noxfile.py +++ b/noxfile.py @@ -141,7 +141,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/samples/snippets/README.rst b/samples/snippets/README.rst index 343431d9..3476ccea 100644 --- a/samples/snippets/README.rst +++ b/samples/snippets/README.rst @@ -1,3 +1,4 @@ + .. This file is automatically generated. Do not edit this file directly. Google Cloud Data Catalog Python Samples @@ -15,13 +16,11 @@ This directory contains samples for Google Cloud Data Catalog. `Google Cloud Dat .. 
_Google Cloud Data Catalog: https://cloud.google.com/data-catalog/docs - - - Setup ------------------------------------------------------------------------------- + Authentication ++++++++++++++ @@ -32,6 +31,9 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started + + + Install Dependencies ++++++++++++++++++++ @@ -46,7 +48,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. code-block:: bash @@ -62,9 +64,15 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ + + + + + Samples ------------------------------------------------------------------------------- + Lookup entry +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -80,6 +88,7 @@ To run this sample: $ python lookup_entry.py + usage: lookup_entry.py [-h] project_id {bigquery-dataset,bigquery-table,pubsub-topic} ... @@ -107,6 +116,10 @@ To run this sample: + + + + The client library ------------------------------------------------------------------------------- @@ -122,4 +135,5 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/synth.metadata b/synth.metadata index 48e1a9a9..bfa323b5 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,8 +3,16 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-datacatalog.git", - "sha": "09d02ebb2738c9663abe060da926c2432d6ffb42" + "remote": "https://github.com/googleapis/python-datacatalog.git", + "sha": "2aac68cae6ac9f96841b9e314af72c0a052ee13f" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "ddaa2026caf2eb00444a14b08500553824a7182a", + "internalRef": "329845759" } }, { @@ -41,5 +49,162 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/release-please.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + 
".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "docs/datacatalog_v1/services.rst", + "docs/datacatalog_v1/types.rst", + "docs/datacatalog_v1beta1/services.rst", + "docs/datacatalog_v1beta1/types.rst", + "docs/multiprocessing.rst", + "google/cloud/datacatalog/__init__.py", + "google/cloud/datacatalog/py.typed", + "google/cloud/datacatalog_v1/__init__.py", + "google/cloud/datacatalog_v1/proto/common.proto", + "google/cloud/datacatalog_v1/proto/datacatalog.proto", + "google/cloud/datacatalog_v1/proto/gcs_fileset_spec.proto", + "google/cloud/datacatalog_v1/proto/schema.proto", + "google/cloud/datacatalog_v1/proto/search.proto", + "google/cloud/datacatalog_v1/proto/table_spec.proto", + "google/cloud/datacatalog_v1/proto/tags.proto", + "google/cloud/datacatalog_v1/proto/timestamps.proto", + "google/cloud/datacatalog_v1/py.typed", + "google/cloud/datacatalog_v1/services/__init__.py", + "google/cloud/datacatalog_v1/services/data_catalog/__init__.py", + "google/cloud/datacatalog_v1/services/data_catalog/async_client.py", + "google/cloud/datacatalog_v1/services/data_catalog/client.py", + "google/cloud/datacatalog_v1/services/data_catalog/pagers.py", + "google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py", + "google/cloud/datacatalog_v1/services/data_catalog/transports/base.py", + "google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py", + "google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py", + "google/cloud/datacatalog_v1/types/__init__.py", + "google/cloud/datacatalog_v1/types/common.py", + "google/cloud/datacatalog_v1/types/datacatalog.py", + 
"google/cloud/datacatalog_v1/types/gcs_fileset_spec.py", + "google/cloud/datacatalog_v1/types/schema.py", + "google/cloud/datacatalog_v1/types/search.py", + "google/cloud/datacatalog_v1/types/table_spec.py", + "google/cloud/datacatalog_v1/types/tags.py", + "google/cloud/datacatalog_v1/types/timestamps.py", + "google/cloud/datacatalog_v1beta1/__init__.py", + "google/cloud/datacatalog_v1beta1/proto/common.proto", + "google/cloud/datacatalog_v1beta1/proto/datacatalog.proto", + "google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto", + "google/cloud/datacatalog_v1beta1/proto/policytagmanager.proto", + "google/cloud/datacatalog_v1beta1/proto/policytagmanagerserialization.proto", + "google/cloud/datacatalog_v1beta1/proto/schema.proto", + "google/cloud/datacatalog_v1beta1/proto/search.proto", + "google/cloud/datacatalog_v1beta1/proto/table_spec.proto", + "google/cloud/datacatalog_v1beta1/proto/tags.proto", + "google/cloud/datacatalog_v1beta1/proto/timestamps.proto", + "google/cloud/datacatalog_v1beta1/py.typed", + "google/cloud/datacatalog_v1beta1/services/__init__.py", + "google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py", + "google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py", + "google/cloud/datacatalog_v1beta1/services/data_catalog/client.py", + "google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py", + "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py", + "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py", + "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py", + "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py", + 
"google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py", + "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py", + "google/cloud/datacatalog_v1beta1/types/__init__.py", + "google/cloud/datacatalog_v1beta1/types/common.py", + "google/cloud/datacatalog_v1beta1/types/datacatalog.py", + "google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py", + "google/cloud/datacatalog_v1beta1/types/policytagmanager.py", + "google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py", + "google/cloud/datacatalog_v1beta1/types/schema.py", + "google/cloud/datacatalog_v1beta1/types/search.py", + "google/cloud/datacatalog_v1beta1/types/table_spec.py", + "google/cloud/datacatalog_v1beta1/types/tags.py", + "google/cloud/datacatalog_v1beta1/types/timestamps.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", + "samples/snippets/noxfile.py", + "scripts/decrypt-secrets.sh", + 
"scripts/fixup_datacatalog_v1_keywords.py", + "scripts/fixup_datacatalog_v1beta1_keywords.py", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/datacatalog_v1/__init__.py", + "tests/unit/gapic/datacatalog_v1/test_data_catalog.py", + "tests/unit/gapic/datacatalog_v1beta1/__init__.py", + "tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py", + "tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py", + "tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py" ] } \ No newline at end of file diff --git a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 79523fa6..fe695445 100644 --- a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -167,6 +167,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -183,6 +184,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -199,6 +201,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -218,6 +221,7 @@ def 
test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -238,6 +242,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -258,6 +263,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has @@ -279,6 +285,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -309,6 +316,7 @@ def test_data_catalog_client_client_options_scopes( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -339,6 +347,7 @@ def test_data_catalog_client_client_options_credentials_file( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -356,6 +365,7 @@ def test_data_catalog_client_client_options_from_dict(): api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -590,8 +600,8 @@ def test_search_catalog_pages(): RuntimeError, ) pages = list(client.search_catalog(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", 
"def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -663,10 +673,10 @@ async def test_search_catalog_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.search_catalog(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.search_catalog(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_create_entry_group( @@ -1803,8 +1813,8 @@ def test_list_entry_groups_pages(): RuntimeError, ) pages = list(client.list_entry_groups(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -1880,10 +1890,10 @@ async def test_list_entry_groups_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_entry_groups(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_entry_groups(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_create_entry( @@ -3083,8 +3093,8 @@ def test_list_entries_pages(): RuntimeError, ) pages = list(client.list_entries(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -3156,10 +3166,10 @@ async def test_list_entries_async_pages(): RuntimeError, ) pages = [] - async for page in (await 
client.list_entries(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_entries(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_create_tag_template( @@ -5725,8 +5735,8 @@ def test_list_tags_pages(): RuntimeError, ) pages = list(client.list_tags(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -5780,10 +5790,10 @@ async def test_list_tags_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_tags(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_tags(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_set_iam_policy( @@ -6697,57 +6707,36 @@ def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_adc( assert transport.grpc_channel == mock_grpc_channel -def test_tag_template_field_path(): - project = "squid" - location = "clam" - tag_template = "whelk" - field = "octopus" - - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format( - project=project, location=location, tag_template=tag_template, field=field, - ) - actual = DataCatalogClient.tag_template_field_path( - project, location, tag_template, field - ) - assert expected == actual - - -def test_parse_tag_template_field_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "tag_template": "cuttlefish", - "field": 
"mussel", - } - path = DataCatalogClient.tag_template_field_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_template_field_path(path) - assert expected == actual - - -def test_tag_template_path(): +def test_tag_path(): project = "squid" location = "clam" - tag_template = "whelk" + entry_group = "whelk" + entry = "octopus" + tag = "oyster" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( - project=project, location=location, tag_template=tag_template, + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( + project=project, + location=location, + entry_group=entry_group, + entry=entry, + tag=tag, ) - actual = DataCatalogClient.tag_template_path(project, location, tag_template) + actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) assert expected == actual -def test_parse_tag_template_path(): +def test_parse_tag_path(): expected = { - "project": "octopus", - "location": "oyster", - "tag_template": "nudibranch", + "project": "nudibranch", + "location": "cuttlefish", + "entry_group": "mussel", + "entry": "winkle", + "tag": "nautilus", } - path = DataCatalogClient.tag_template_path(**expected) + path = DataCatalogClient.tag_path(**expected) # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_tag_template_path(path) + actual = DataCatalogClient.parse_tag_path(path) assert expected == actual @@ -6778,59 +6767,101 @@ def test_parse_entry_path(): assert expected == actual -def test_tag_path(): +def test_entry_group_path(): project = "squid" location = "clam" entry_group = "whelk" - entry = "octopus" - tag = "oyster" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( - project=project, - location=location, - entry_group=entry_group, - entry=entry, - tag=tag, + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( + project=project, location=location, entry_group=entry_group, ) - actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) + actual = DataCatalogClient.entry_group_path(project, location, entry_group) assert expected == actual -def test_parse_tag_path(): +def test_parse_entry_group_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "entry_group": "mussel", - "entry": "winkle", - "tag": "nautilus", + "project": "octopus", + "location": "oyster", + "entry_group": "nudibranch", } - path = DataCatalogClient.tag_path(**expected) + path = DataCatalogClient.entry_group_path(**expected) # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_tag_path(path) + actual = DataCatalogClient.parse_entry_group_path(path) assert expected == actual -def test_entry_group_path(): +def test_tag_template_path(): project = "squid" location = "clam" - entry_group = "whelk" + tag_template = "whelk" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( - project=project, location=location, entry_group=entry_group, + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( + project=project, location=location, tag_template=tag_template, ) - actual = DataCatalogClient.entry_group_path(project, location, entry_group) + actual = DataCatalogClient.tag_template_path(project, location, tag_template) assert expected == actual -def test_parse_entry_group_path(): +def test_parse_tag_template_path(): expected = { "project": "octopus", "location": "oyster", - "entry_group": "nudibranch", + "tag_template": "nudibranch", } - path = DataCatalogClient.entry_group_path(**expected) + path = DataCatalogClient.tag_template_path(**expected) # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_entry_group_path(path) + actual = DataCatalogClient.parse_tag_template_path(path) assert expected == actual + + +def test_tag_template_field_path(): + project = "squid" + location = "clam" + tag_template = "whelk" + field = "octopus" + + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format( + project=project, location=location, tag_template=tag_template, field=field, + ) + actual = DataCatalogClient.tag_template_field_path( + project, location, tag_template, field + ) + assert expected == actual + + +def test_parse_tag_template_field_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "tag_template": "cuttlefish", + "field": "mussel", + } + path = DataCatalogClient.tag_template_field_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_template_field_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataCatalogTransport, "_prep_wrapped_messages" + ) as prep: + client = DataCatalogClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataCatalogTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataCatalogClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index 427f8b6b..cf831bf6 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -171,6 +171,7 @@ def test_data_catalog_client_client_options( 
api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -187,6 +188,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -203,6 +205,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -222,6 +225,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -242,6 +246,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -262,6 +267,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has @@ -283,6 +289,7 @@ def test_data_catalog_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -313,6 +320,7 @@ def test_data_catalog_client_client_options_scopes( api_mtls_endpoint=client.DEFAULT_ENDPOINT, 
client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -343,6 +351,7 @@ def test_data_catalog_client_client_options_credentials_file( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -360,6 +369,7 @@ def test_data_catalog_client_client_options_from_dict(): api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -587,8 +597,8 @@ def test_search_catalog_pages(): RuntimeError, ) pages = list(client.search_catalog(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -660,10 +670,10 @@ async def test_search_catalog_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.search_catalog(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.search_catalog(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_create_entry_group( @@ -1800,8 +1810,8 @@ def test_list_entry_groups_pages(): RuntimeError, ) pages = list(client.list_entry_groups(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -1877,10 +1887,10 @@ async def test_list_entry_groups_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_entry_groups(request={})).pages: - pages.append(page) - for 
page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_entry_groups(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_create_entry( @@ -3080,8 +3090,8 @@ def test_list_entries_pages(): RuntimeError, ) pages = list(client.list_entries(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -3153,10 +3163,10 @@ async def test_list_entries_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_entries(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_entries(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_create_tag_template( @@ -5722,8 +5732,8 @@ def test_list_tags_pages(): RuntimeError, ) pages = list(client.list_tags(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -5777,10 +5787,10 @@ async def test_list_tags_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_tags(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_tags(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + 
assert page_.raw_page.next_page_token == token def test_set_iam_policy( @@ -6694,28 +6704,36 @@ def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_adc( assert transport.grpc_channel == mock_grpc_channel -def test_tag_template_path(): +def test_tag_path(): project = "squid" location = "clam" - tag_template = "whelk" + entry_group = "whelk" + entry = "octopus" + tag = "oyster" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( - project=project, location=location, tag_template=tag_template, + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( + project=project, + location=location, + entry_group=entry_group, + entry=entry, + tag=tag, ) - actual = DataCatalogClient.tag_template_path(project, location, tag_template) + actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) assert expected == actual -def test_parse_tag_template_path(): +def test_parse_tag_path(): expected = { - "project": "octopus", - "location": "oyster", - "tag_template": "nudibranch", + "project": "nudibranch", + "location": "cuttlefish", + "entry_group": "mussel", + "entry": "winkle", + "tag": "nautilus", } - path = DataCatalogClient.tag_template_path(**expected) + path = DataCatalogClient.tag_path(**expected) # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_tag_template_path(path) + actual = DataCatalogClient.parse_tag_path(path) assert expected == actual @@ -6746,31 +6764,6 @@ def test_parse_entry_path(): assert expected == actual -def test_entry_group_path(): - project = "squid" - location = "clam" - entry_group = "whelk" - - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( - project=project, location=location, entry_group=entry_group, - ) - actual = DataCatalogClient.entry_group_path(project, location, entry_group) - assert expected == actual - - -def test_parse_entry_group_path(): - expected = { - "project": "octopus", - "location": "oyster", - "entry_group": "nudibranch", - } - path = DataCatalogClient.entry_group_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_entry_group_path(path) - assert expected == actual - - def test_tag_template_field_path(): project = "squid" location = "clam" @@ -6800,34 +6793,72 @@ def test_parse_tag_template_field_path(): assert expected == actual -def test_tag_path(): +def test_entry_group_path(): project = "squid" location = "clam" entry_group = "whelk" - entry = "octopus" - tag = "oyster" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( - project=project, - location=location, - entry_group=entry_group, - entry=entry, - tag=tag, + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( + project=project, location=location, entry_group=entry_group, ) - actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) + actual = DataCatalogClient.entry_group_path(project, location, entry_group) assert expected == actual -def test_parse_tag_path(): +def test_parse_entry_group_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "entry_group": "mussel", - "entry": "winkle", - "tag": "nautilus", + "project": "octopus", + 
"location": "oyster", + "entry_group": "nudibranch", } - path = DataCatalogClient.tag_path(**expected) + path = DataCatalogClient.entry_group_path(**expected) # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_path(path) + actual = DataCatalogClient.parse_entry_group_path(path) + assert expected == actual + + +def test_tag_template_path(): + project = "squid" + location = "clam" + tag_template = "whelk" + + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( + project=project, location=location, tag_template=tag_template, + ) + actual = DataCatalogClient.tag_template_path(project, location, tag_template) assert expected == actual + + +def test_parse_tag_template_path(): + expected = { + "project": "octopus", + "location": "oyster", + "tag_template": "nudibranch", + } + path = DataCatalogClient.tag_template_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_template_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataCatalogTransport, "_prep_wrapped_messages" + ) as prep: + client = DataCatalogClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataCatalogTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataCatalogClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index de5f0342..51de69f0 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ 
b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -168,6 +168,7 @@ def test_policy_tag_manager_client_client_options( api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -184,6 +185,7 @@ def test_policy_tag_manager_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -200,6 +202,7 @@ def test_policy_tag_manager_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -219,6 +222,7 @@ def test_policy_tag_manager_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -239,6 +243,7 @@ def test_policy_tag_manager_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -259,6 +264,7 @@ def test_policy_tag_manager_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has @@ -280,6 +286,7 @@ def test_policy_tag_manager_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id="octopus", + 
client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -310,6 +317,7 @@ def test_policy_tag_manager_client_client_options_scopes( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -340,6 +348,7 @@ def test_policy_tag_manager_client_client_options_credentials_file( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -359,6 +368,7 @@ def test_policy_tag_manager_client_client_options_from_dict(): api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -1285,8 +1295,8 @@ def test_list_taxonomies_pages(): RuntimeError, ) pages = list(client.list_taxonomies(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -1362,10 +1372,10 @@ async def test_list_taxonomies_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_taxonomies(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_taxonomies(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_get_taxonomy( @@ -2548,8 +2558,8 @@ def test_list_policy_tags_pages(): RuntimeError, ) pages = list(client.list_policy_tags(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -2631,10 
+2641,10 @@ async def test_list_policy_tags_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_policy_tags(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_policy_tags(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_get_policy_tag( @@ -3629,55 +3639,76 @@ def test_policy_tag_manager_grpc_asyncio_transport_channel_mtls_with_adc( assert transport.grpc_channel == mock_grpc_channel -def test_taxonomy_path(): +def test_policy_tag_path(): project = "squid" location = "clam" taxonomy = "whelk" + policy_tag = "octopus" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( - project=project, location=location, taxonomy=taxonomy, + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format( + project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, + ) + actual = PolicyTagManagerClient.policy_tag_path( + project, location, taxonomy, policy_tag ) - actual = PolicyTagManagerClient.taxonomy_path(project, location, taxonomy) assert expected == actual -def test_parse_taxonomy_path(): +def test_parse_policy_tag_path(): expected = { - "project": "octopus", - "location": "oyster", - "taxonomy": "nudibranch", + "project": "oyster", + "location": "nudibranch", + "taxonomy": "cuttlefish", + "policy_tag": "mussel", } - path = PolicyTagManagerClient.taxonomy_path(**expected) + path = PolicyTagManagerClient.policy_tag_path(**expected) # Check that the path construction is reversible. 
- actual = PolicyTagManagerClient.parse_taxonomy_path(path) + actual = PolicyTagManagerClient.parse_policy_tag_path(path) assert expected == actual -def test_policy_tag_path(): +def test_taxonomy_path(): project = "squid" location = "clam" taxonomy = "whelk" - policy_tag = "octopus" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format( - project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, - ) - actual = PolicyTagManagerClient.policy_tag_path( - project, location, taxonomy, policy_tag + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, ) + actual = PolicyTagManagerClient.taxonomy_path(project, location, taxonomy) assert expected == actual -def test_parse_policy_tag_path(): +def test_parse_taxonomy_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "taxonomy": "cuttlefish", - "policy_tag": "mussel", + "project": "octopus", + "location": "oyster", + "taxonomy": "nudibranch", } - path = PolicyTagManagerClient.policy_tag_path(**expected) + path = PolicyTagManagerClient.taxonomy_path(**expected) # Check that the path construction is reversible. 
- actual = PolicyTagManagerClient.parse_policy_tag_path(path) + actual = PolicyTagManagerClient.parse_taxonomy_path(path) assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PolicyTagManagerTransport, "_prep_wrapped_messages" + ) as prep: + client = PolicyTagManagerClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PolicyTagManagerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PolicyTagManagerClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py index 9676d368..4b8f9dd6 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -27,6 +27,7 @@ from google import auth from google.api_core import client_options from google.api_core import exceptions +from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.auth import credentials @@ -179,6 +180,7 @@ def test_policy_tag_manager_serialization_client_client_options( api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -195,6 +197,7 @@ def test_policy_tag_manager_serialization_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + 
client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -211,6 +214,7 @@ def test_policy_tag_manager_serialization_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -230,6 +234,7 @@ def test_policy_tag_manager_serialization_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -250,6 +255,7 @@ def test_policy_tag_manager_serialization_client_client_options( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -270,6 +276,7 @@ def test_policy_tag_manager_serialization_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has @@ -291,6 +298,7 @@ def test_policy_tag_manager_serialization_client_client_options( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -325,6 +333,7 @@ def test_policy_tag_manager_serialization_client_client_options_scopes( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -359,6 +368,7 @@ def test_policy_tag_manager_serialization_client_client_options_credentials_file api_mtls_endpoint=client.DEFAULT_ENDPOINT, 
client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -378,6 +388,7 @@ def test_policy_tag_manager_serialization_client_client_options_from_dict(): api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -965,3 +976,24 @@ def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel_mtls_wi quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages" + ) as prep: + client = PolicyTagManagerSerializationClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PolicyTagManagerSerializationClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) From 7f1b8ee4579c4306d9b6a56498a0755803b9eadf Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Thu, 3 Sep 2020 06:11:48 -0700 Subject: [PATCH 2/3] chore(py-library): enable snippet-bot Co-authored-by: Benjamin E. 
Coe Source-Author: Takashi Matsuo Source-Date: Tue Sep 1 17:14:08 2020 +0000 Source-Repo: googleapis/synthtool Source-Sha: d91dd8aac77f7a9c5506c238038a26fa4f9e361e Source-Link: https://github.com/googleapis/synthtool/commit/d91dd8aac77f7a9c5506c238038a26fa4f9e361e --- .github/snippet-bot.yml | 0 synth.metadata | 5 +++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 .github/snippet-bot.yml diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml new file mode 100644 index 00000000..e69de29b diff --git a/synth.metadata b/synth.metadata index bfa323b5..c17741cd 100644 --- a/synth.metadata +++ b/synth.metadata @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d3049e66447b44dc10579e461d5e08e0e3838edd" + "sha": "d91dd8aac77f7a9c5506c238038a26fa4f9e361e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d3049e66447b44dc10579e461d5e08e0e3838edd" + "sha": "d91dd8aac77f7a9c5506c238038a26fa4f9e361e" } } ], @@ -58,6 +58,7 @@ ".github/ISSUE_TEMPLATE/support_request.md", ".github/PULL_REQUEST_TEMPLATE.md", ".github/release-please.yml", + ".github/snippet-bot.yml", ".gitignore", ".kokoro/build.sh", ".kokoro/continuous/common.cfg", From 6978e157dd3605f40c0b34960701ac30efbbae43 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Tue, 17 Nov 2020 02:06:29 +0000 Subject: [PATCH 3/3] chore: manual regen --- .kokoro/docs/common.cfg | 2 +- .kokoro/populate-secrets.sh | 43 + .kokoro/release/common.cfg | 50 +- .kokoro/samples/python3.6/common.cfg | 6 + .kokoro/samples/python3.7/common.cfg | 6 + .kokoro/samples/python3.8/common.cfg | 6 + .kokoro/test-samples.sh | 8 +- .kokoro/trampoline.sh | 15 +- CODE_OF_CONDUCT.md | 123 +- CONTRIBUTING.rst | 19 - docs/conf.py | 4 +- docs/datacatalog_v1/types.rst | 1 + docs/datacatalog_v1beta1/types.rst | 1 + .../services/data_catalog/async_client.py | 157 +- .../services/data_catalog/client.py | 180 +- 
.../services/data_catalog/transports/grpc.py | 64 +- .../data_catalog/transports/grpc_asyncio.py | 55 +- .../cloud/datacatalog_v1/types/datacatalog.py | 6 +- google/cloud/datacatalog_v1/types/schema.py | 4 +- google/cloud/datacatalog_v1/types/tags.py | 4 +- google/cloud/datacatalog_v1beta1/__init__.py | 4 +- .../services/data_catalog/async_client.py | 177 +- .../services/data_catalog/client.py | 180 +- .../services/data_catalog/transports/base.py | 20 +- .../services/data_catalog/transports/grpc.py | 64 +- .../data_catalog/transports/grpc_asyncio.py | 55 +- .../policy_tag_manager/async_client.py | 90 +- .../services/policy_tag_manager/client.py | 164 +- .../policy_tag_manager/transports/grpc.py | 64 +- .../transports/grpc_asyncio.py | 55 +- .../async_client.py | 68 +- .../client.py | 180 +- .../transports/grpc.py | 64 +- .../transports/grpc_asyncio.py | 55 +- .../datacatalog_v1beta1/types/datacatalog.py | 6 +- .../types/policytagmanager.py | 12 +- .../types/policytagmanagerserialization.py | 4 +- .../cloud/datacatalog_v1beta1/types/schema.py | 4 +- .../cloud/datacatalog_v1beta1/types/tags.py | 4 +- noxfile.py | 10 +- samples/snippets/noxfile.py | 24 +- scripts/decrypt-secrets.sh | 15 +- scripts/fixup_datacatalog_v1_keywords.py | 1 + scripts/fixup_datacatalog_v1beta1_keywords.py | 1 + synth.metadata | 170 +- .../gapic/datacatalog_v1/test_data_catalog.py | 1525 +++++++++------- .../datacatalog_v1beta1/test_data_catalog.py | 1547 ++++++++++------- .../test_policy_tag_manager.py | 1047 ++++++----- .../test_policy_tag_manager_serialization.py | 675 ++++--- 49 files changed, 4297 insertions(+), 2742 deletions(-) create mode 100755 .kokoro/populate-secrets.sh diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 6039d7e9..389a65e6 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker 
image after successful builds. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 00000000..f5251425 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? 
== 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 77edf9ab..3156ce87 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-datacatalog/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index 1e4766c2..9e55ece3 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-datacatalog/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index 3a67b23d..d1630ce2 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-datacatalog/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index 5ffafdbc..b53c2553 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-datacatalog/.kokoro/test-samples.sh" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index b8ac5ae1..49ac61fa 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then git checkout $LATEST_RELEASE fi +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 @@ -101,4 +107,4 @@ cd "$ROOT" # Workaround for Kokoro permissions issue: delete secrets rm testing/{test-env.sh,client-secrets.json,service-account.json} -exit "$RTN" \ No newline at end of file +exit "$RTN" diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index e8c4251f..f39236e9 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index b3d1f602..039f4368 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. 
+In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. 
+ +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. 
+ +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 1800a1f1..cdd8c7f3 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? ***************************************** diff --git a/docs/conf.py b/docs/conf.py index 015d055f..01be52f5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -39,6 +39,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -348,6 +349,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/docs/datacatalog_v1/types.rst b/docs/datacatalog_v1/types.rst index cb94a5e5..19f12ef8 100644 --- a/docs/datacatalog_v1/types.rst +++ b/docs/datacatalog_v1/types.rst @@ -3,3 +3,4 @@ Types for Google Cloud Datacatalog v1 API .. automodule:: google.cloud.datacatalog_v1.types :members: + :show-inheritance: diff --git a/docs/datacatalog_v1beta1/types.rst b/docs/datacatalog_v1beta1/types.rst index 75ee2bb4..a1baedaf 100644 --- a/docs/datacatalog_v1beta1/types.rst +++ b/docs/datacatalog_v1beta1/types.rst @@ -3,3 +3,4 @@ Types for Google Cloud Datacatalog v1beta1 API .. 
automodule:: google.cloud.datacatalog_v1beta1.types :members: + :show-inheritance: diff --git a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 88de393c..e5cb1dbf 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -56,19 +56,56 @@ class DataCatalogAsyncClient: DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT + entry_path = staticmethod(DataCatalogClient.entry_path) + parse_entry_path = staticmethod(DataCatalogClient.parse_entry_path) + entry_group_path = staticmethod(DataCatalogClient.entry_group_path) + parse_entry_group_path = staticmethod(DataCatalogClient.parse_entry_group_path) tag_path = staticmethod(DataCatalogClient.tag_path) + parse_tag_path = staticmethod(DataCatalogClient.parse_tag_path) + tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + parse_tag_template_path = staticmethod(DataCatalogClient.parse_tag_template_path) + tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) + parse_tag_template_field_path = staticmethod( + DataCatalogClient.parse_tag_template_field_path + ) - entry_path = staticmethod(DataCatalogClient.entry_path) + common_billing_account_path = staticmethod( + DataCatalogClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DataCatalogClient.parse_common_billing_account_path + ) - entry_group_path = staticmethod(DataCatalogClient.entry_group_path) + common_folder_path = staticmethod(DataCatalogClient.common_folder_path) + parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path) - tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + common_organization_path = staticmethod(DataCatalogClient.common_organization_path) + 
parse_common_organization_path = staticmethod( + DataCatalogClient.parse_common_organization_path + ) - tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) + common_project_path = staticmethod(DataCatalogClient.common_project_path) + parse_common_project_path = staticmethod( + DataCatalogClient.parse_common_project_path + ) + + common_location_path = staticmethod(DataCatalogClient.common_location_path) + parse_common_location_path = staticmethod( + DataCatalogClient.parse_common_location_path + ) from_service_account_file = DataCatalogClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> DataCatalogTransport: + """Return the transport used by the client instance. + + Returns: + DataCatalogTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(DataCatalogClient).get_transport_class, type(DataCatalogClient) ) @@ -95,16 +132,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. 
+ use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -196,7 +236,8 @@ async def search_catalog( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([scope, query]): + has_flattened_params = any([scope, query]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -319,7 +360,8 @@ async def create_entry_group( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, entry_group_id, entry_group]): + has_flattened_params = any([parent, entry_group_id, entry_group]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -402,7 +444,8 @@ async def get_entry_group( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([name, read_mask]): + has_flattened_params = any([name, read_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -494,7 +537,8 @@ async def update_entry_group( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([entry_group, update_mask]): + has_flattened_params = any([entry_group, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -568,7 +612,8 @@ async def delete_entry_group( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -644,7 +689,8 @@ async def list_entry_groups( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -763,7 +809,8 @@ async def create_entry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([parent, entry_id, entry]): + has_flattened_params = any([parent, entry_id, entry]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -882,7 +929,8 @@ async def update_entry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([entry, update_mask]): + has_flattened_params = any([entry, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -958,7 +1006,8 @@ async def delete_entry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1038,7 +1087,8 @@ async def get_entry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1183,7 +1233,8 @@ async def list_entries( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1295,7 +1346,8 @@ async def create_tag_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, tag_template_id, tag_template]): + has_flattened_params = any([parent, tag_template_id, tag_template]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1378,7 +1430,8 @@ async def get_tag_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1478,7 +1531,8 @@ async def update_tag_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([tag_template, update_mask]): + has_flattened_params = any([tag_template, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1562,7 +1616,8 @@ async def delete_tag_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([name, force]): + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1661,9 +1716,8 @@ async def create_tag_template_field( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any( - [parent, tag_template_field_id, tag_template_field] - ): + has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1770,7 +1824,8 @@ async def update_tag_template_field( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, tag_template_field, update_mask]): + has_flattened_params = any([name, tag_template_field, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1857,7 +1912,8 @@ async def rename_tag_template_field( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, new_tag_template_field_id]): + has_flattened_params = any([name, new_tag_template_field_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1940,7 +1996,8 @@ async def delete_tag_template_field( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, force]): + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2034,7 +2091,8 @@ async def create_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, tag]): + has_flattened_params = any([parent, tag]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2121,7 +2179,8 @@ async def update_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([tag, update_mask]): + has_flattened_params = any([tag, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2189,7 +2248,8 @@ async def delete_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2269,7 +2329,8 @@ async def list_tags( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2433,7 +2494,8 @@ async def set_iam_policy( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([resource]): + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2445,13 +2507,7 @@ async def set_iam_policy( request = iam_policy.SetIamPolicyRequest(**request) elif not request: - request = iam_policy.SetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if resource is not None: - request.resource = resource + request = iam_policy.SetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2595,7 +2651,8 @@ async def get_iam_policy( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([resource]): + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2607,13 +2664,7 @@ async def get_iam_policy( request = iam_policy.GetIamPolicyRequest(**request) elif not request: - request = iam_policy.GetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- - if resource is not None: - request.resource = resource + request = iam_policy.GetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/google/cloud/datacatalog_v1/services/data_catalog/client.py b/google/cloud/datacatalog_v1/services/data_catalog/client.py index ab11c91f..dae42e6a 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -16,17 +16,19 @@ # from collections import OrderedDict +from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore @@ -138,6 +140,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> DataCatalogTransport: + """Return the transport used by the client instance. + + Returns: + DataCatalogTransport: The transport used by the client instance. 
+ """ + return self._transport + @staticmethod def entry_path(project: str, location: str, entry_group: str, entry: str,) -> str: """Return a fully-qualified entry string.""" @@ -226,12 +237,71 @@ def parse_tag_template_field_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def 
common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, DataCatalogTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, DataCatalogTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the data catalog client. @@ -245,23 +315,26 @@ def __init__( transport (Union[str, ~.DataCatalogTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. 
- (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -269,29 +342,47 @@ def __init__( creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
@@ -315,10 +406,9 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -2662,13 +2752,7 @@ def set_iam_policy( request = iam_policy.SetIamPolicyRequest(**request) elif not request: - request = iam_policy.SetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if resource is not None: - request.resource = resource + request = iam_policy.SetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2821,13 +2905,7 @@ def get_iam_policy( request = iam_policy.GetIamPolicyRequest(**request) elif not request: - request = iam_policy.GetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if resource is not None: - request.resource = resource + request = iam_policy.GetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py index 3fd33e16..9150d2ac 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py @@ -15,6 +15,7 @@ # limitations under the License. 
# +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore @@ -23,7 +24,6 @@ from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.cloud.datacatalog_v1.types import datacatalog @@ -61,6 +61,7 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -81,20 +82,22 @@ def __init__( ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -103,6 +106,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -110,7 +115,13 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -141,6 +152,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -201,19 +230,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. return self._grpc_channel @property diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py index 8851137f..49b84e3a 100644 --- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py @@ -15,10 +15,12 @@ # limitations under the License. 
# +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -103,6 +105,7 @@ def __init__( channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -124,14 +127,16 @@ def __init__( are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -146,6 +151,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -153,13 +160,24 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -179,6 +197,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) # Run the base constructor. super().__init__( @@ -199,13 +235,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. 
- if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/google/cloud/datacatalog_v1/types/datacatalog.py b/google/cloud/datacatalog_v1/types/datacatalog.py index fdb31546..a02ed993 100644 --- a/google/cloud/datacatalog_v1/types/datacatalog.py +++ b/google/cloud/datacatalog_v1/types/datacatalog.py @@ -537,7 +537,7 @@ class Entry(proto.Message): Output only when Entry is of type in the EntryType enum. For entries with user_specified_type, this field is optional and defaults to an empty string. - type (~.datacatalog.EntryType): + type_ (~.datacatalog.EntryType): The type of the entry. Only used for Entries with types in the EntryType enum. @@ -601,7 +601,7 @@ class Entry(proto.Message): linked_resource = proto.Field(proto.STRING, number=9) - type = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",) + type_ = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",) user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type") @@ -1034,7 +1034,7 @@ class ListEntriesResponse(proto.Message): def raw_page(self): return self - entries = proto.RepeatedField(proto.MESSAGE, number=1, message=Entry,) + entries = proto.RepeatedField(proto.MESSAGE, number=1, message="Entry",) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/datacatalog_v1/types/schema.py b/google/cloud/datacatalog_v1/types/schema.py index 4a51a122..98560462 100644 --- a/google/cloud/datacatalog_v1/types/schema.py +++ b/google/cloud/datacatalog_v1/types/schema.py @@ -42,7 +42,7 @@ class ColumnSchema(proto.Message): Attributes: column (str): Required. Name of the column. - type (str): + type_ (str): Required. Type of the column. description (str): Optional. Description of the column. 
Default @@ -59,7 +59,7 @@ class ColumnSchema(proto.Message): column = proto.Field(proto.STRING, number=6) - type = proto.Field(proto.STRING, number=1) + type_ = proto.Field(proto.STRING, number=1) description = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/datacatalog_v1/types/tags.py b/google/cloud/datacatalog_v1/types/tags.py index 8e6e94e0..4ef4efa7 100644 --- a/google/cloud/datacatalog_v1/types/tags.py +++ b/google/cloud/datacatalog_v1/types/tags.py @@ -202,7 +202,7 @@ class TagTemplateField(proto.Message): display_name (str): The display name for this field. Defaults to an empty string. - type (~.tags.FieldType): + type_ (~.tags.FieldType): Required. The type of value this tag field can contain. is_required (bool): @@ -221,7 +221,7 @@ class TagTemplateField(proto.Message): display_name = proto.Field(proto.STRING, number=1) - type = proto.Field(proto.MESSAGE, number=2, message="FieldType",) + type_ = proto.Field(proto.MESSAGE, number=2, message="FieldType",) is_required = proto.Field(proto.BOOL, number=3) diff --git a/google/cloud/datacatalog_v1beta1/__init__.py b/google/cloud/datacatalog_v1beta1/__init__.py index 16534418..be0bdd8e 100644 --- a/google/cloud/datacatalog_v1beta1/__init__.py +++ b/google/cloud/datacatalog_v1beta1/__init__.py @@ -103,6 +103,7 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "CreateTaxonomyRequest", + "DataCatalogClient", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -140,7 +141,6 @@ "LookupEntryRequest", "PolicyTag", "PolicyTagManagerClient", - "PolicyTagManagerSerializationClient", "RenameTagTemplateFieldRequest", "Schema", "SearchCatalogRequest", @@ -165,5 +165,5 @@ "UpdateTagTemplateRequest", "UpdateTaxonomyRequest", "ViewSpec", - "DataCatalogClient", + "PolicyTagManagerSerializationClient", ) diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py index 
9ed89045..bec3d14c 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py @@ -56,19 +56,56 @@ class DataCatalogAsyncClient: DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT + entry_path = staticmethod(DataCatalogClient.entry_path) + parse_entry_path = staticmethod(DataCatalogClient.parse_entry_path) + entry_group_path = staticmethod(DataCatalogClient.entry_group_path) + parse_entry_group_path = staticmethod(DataCatalogClient.parse_entry_group_path) tag_path = staticmethod(DataCatalogClient.tag_path) + parse_tag_path = staticmethod(DataCatalogClient.parse_tag_path) + tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + parse_tag_template_path = staticmethod(DataCatalogClient.parse_tag_template_path) + tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) + parse_tag_template_field_path = staticmethod( + DataCatalogClient.parse_tag_template_field_path + ) - entry_path = staticmethod(DataCatalogClient.entry_path) + common_billing_account_path = staticmethod( + DataCatalogClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DataCatalogClient.parse_common_billing_account_path + ) - tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) + common_folder_path = staticmethod(DataCatalogClient.common_folder_path) + parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path) - entry_group_path = staticmethod(DataCatalogClient.entry_group_path) + common_organization_path = staticmethod(DataCatalogClient.common_organization_path) + parse_common_organization_path = staticmethod( + DataCatalogClient.parse_common_organization_path + ) - tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + common_project_path = 
staticmethod(DataCatalogClient.common_project_path) + parse_common_project_path = staticmethod( + DataCatalogClient.parse_common_project_path + ) + + common_location_path = staticmethod(DataCatalogClient.common_location_path) + parse_common_location_path = staticmethod( + DataCatalogClient.parse_common_location_path + ) from_service_account_file = DataCatalogClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> DataCatalogTransport: + """Return the transport used by the client instance. + + Returns: + DataCatalogTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(DataCatalogClient).get_transport_class, type(DataCatalogClient) ) @@ -95,16 +132,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -196,7 +236,8 @@ async def search_catalog( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([scope, query]): + has_flattened_params = any([scope, query]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -301,7 +342,8 @@ async def create_entry_group( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, entry_group_id, entry_group]): + has_flattened_params = any([parent, entry_group_id, entry_group]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -390,7 +432,8 @@ async def update_entry_group( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([entry_group, update_mask]): + has_flattened_params = any([entry_group, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -474,7 +517,8 @@ async def get_entry_group( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, read_mask]): + has_flattened_params = any([name, read_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -499,7 +543,7 @@ async def get_entry_group( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -554,7 +598,8 @@ async def delete_entry_group( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -577,7 +622,7 @@ async def delete_entry_group( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -638,7 +683,8 @@ async def list_entry_groups( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -751,7 +797,8 @@ async def create_entry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, entry_id, entry]): + has_flattened_params = any([parent, entry_id, entry]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -870,7 +917,8 @@ async def update_entry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([entry, update_mask]): + has_flattened_params = any([entry, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -946,7 +994,8 @@ async def delete_entry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -969,7 +1018,7 @@ async def delete_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -1034,7 +1083,8 @@ async def get_entry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1057,7 +1107,7 @@ async def get_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -1128,7 +1178,7 @@ async def lookup_entry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -1183,7 +1233,8 @@ async def list_entries( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1289,7 +1340,8 @@ async def create_tag_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, tag_template_id, tag_template]): + has_flattened_params = any([parent, tag_template_id, tag_template]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1372,7 +1424,8 @@ async def get_tag_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1395,7 +1448,7 @@ async def get_tag_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -1480,7 +1533,8 @@ async def update_tag_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([tag_template, update_mask]): + has_flattened_params = any([tag_template, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1564,7 +1618,8 @@ async def delete_tag_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, force]): + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1589,7 +1644,7 @@ async def delete_tag_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -1671,9 +1726,8 @@ async def create_tag_template_field( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any( - [parent, tag_template_field_id, tag_template_field] - ): + has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1780,7 +1834,8 @@ async def update_tag_template_field( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, tag_template_field, update_mask]): + has_flattened_params = any([name, tag_template_field, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1867,7 +1922,8 @@ async def rename_tag_template_field( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, new_tag_template_field_id]): + has_flattened_params = any([name, new_tag_template_field_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1950,7 +2006,8 @@ async def delete_tag_template_field( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([name, force]): + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1975,7 +2032,7 @@ async def delete_tag_template_field( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -2053,7 +2110,8 @@ async def create_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, tag]): + has_flattened_params = any([parent, tag]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2140,7 +2198,8 @@ async def update_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([tag, update_mask]): + has_flattened_params = any([tag, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2208,7 +2267,8 @@ async def delete_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -2231,7 +2291,7 @@ async def delete_tag( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -2297,7 +2357,8 @@ async def list_tags( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2320,7 +2381,7 @@ async def list_tags( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -2463,7 +2524,8 @@ async def set_iam_policy( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([resource]): + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2475,13 +2537,7 @@ async def set_iam_policy( request = iam_policy.SetIamPolicyRequest(**request) elif not request: - request = iam_policy.SetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if resource is not None: - request.resource = resource + request = iam_policy.SetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -2625,7 +2681,8 @@ async def get_iam_policy( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([resource]): + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2637,13 +2694,7 @@ async def get_iam_policy( request = iam_policy.GetIamPolicyRequest(**request) elif not request: - request = iam_policy.GetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if resource is not None: - request.resource = resource + request = iam_policy.GetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index 6fcee78d..da6b34fe 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -16,17 +16,19 @@ # from collections import OrderedDict +from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import 
SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore @@ -138,6 +140,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> DataCatalogTransport: + """Return the transport used by the client instance. + + Returns: + DataCatalogTransport: The transport used by the client instance. + """ + return self._transport + @staticmethod def entry_path(project: str, location: str, entry_group: str, entry: str,) -> str: """Return a fully-qualified entry string.""" @@ -226,12 +237,71 @@ def parse_tag_template_field_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + 
m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, DataCatalogTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, DataCatalogTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the data catalog client. @@ -245,23 +315,26 @@ def __init__( transport (Union[str, ~.DataCatalogTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -269,29 +342,47 @@ def __init__( creation failed for any reason. 
""" if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted values: never, auto, always" ) # Save or instantiate the transport. @@ -315,10 +406,9 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -2654,13 +2744,7 @@ def set_iam_policy( request = iam_policy.SetIamPolicyRequest(**request) elif not request: - request = iam_policy.SetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if resource is not None: - request.resource = resource + request = iam_policy.SetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2813,13 +2897,7 @@ def get_iam_policy( request = iam_policy.GetIamPolicyRequest(**request) elif not request: - request = iam_policy.GetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if resource is not None: - request.resource = resource + request = iam_policy.GetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py index 097d283d..38de8373 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py @@ -126,7 +126,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -139,7 +139,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -161,7 +161,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -174,7 +174,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -187,7 +187,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -206,7 +206,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + 
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -222,7 +222,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -250,7 +250,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -269,7 +269,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -282,7 +282,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=60.0, diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py index cbd4a26a..4a34e3f9 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py @@ -15,6 +15,7 @@ # limitations under the License. 
# +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore @@ -23,7 +24,6 @@ from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.cloud.datacatalog_v1beta1.types import datacatalog @@ -61,6 +61,7 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -81,20 +82,22 @@ def __init__( ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -103,6 +106,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -110,7 +115,13 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -141,6 +152,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -201,19 +230,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. return self._grpc_channel @property diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py index df77b9b0..b8670aa2 100644 --- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py @@ -15,10 +15,12 @@ # limitations under the License. 
# +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -103,6 +105,7 @@ def __init__( channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -124,14 +127,16 @@ def __init__( are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -146,6 +151,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -153,13 +160,24 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -179,6 +197,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) # Run the base constructor. super().__init__( @@ -199,13 +235,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. 
- if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py index 3df25722..759d80df 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py @@ -49,12 +49,51 @@ class PolicyTagManagerAsyncClient: DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path) - + parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path) taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) + parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path) + + common_billing_account_path = staticmethod( + PolicyTagManagerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PolicyTagManagerClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path) + parse_common_folder_path = staticmethod( + PolicyTagManagerClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + PolicyTagManagerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PolicyTagManagerClient.parse_common_organization_path + ) + + common_project_path = staticmethod(PolicyTagManagerClient.common_project_path) + parse_common_project_path = staticmethod( + PolicyTagManagerClient.parse_common_project_path + ) + + common_location_path = staticmethod(PolicyTagManagerClient.common_location_path) + parse_common_location_path = staticmethod( + PolicyTagManagerClient.parse_common_location_path + ) from_service_account_file = 
PolicyTagManagerClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> PolicyTagManagerTransport: + """Return the transport used by the client instance. + + Returns: + PolicyTagManagerTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient) ) @@ -81,16 +120,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -153,7 +195,8 @@ async def create_taxonomy( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, taxonomy]): + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -223,7 +266,8 @@ async def delete_taxonomy( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -298,7 +342,8 @@ async def update_taxonomy( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([taxonomy]): + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -375,7 +420,8 @@ async def list_taxonomies( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -457,7 +503,8 @@ async def get_taxonomy( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -541,7 +588,8 @@ async def create_policy_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, policy_tag]): + has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -610,7 +658,8 @@ async def delete_policy_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -687,7 +736,8 @@ async def update_policy_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([policy_tag]): + has_flattened_params = any([policy_tag]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -763,7 +813,8 @@ async def list_policy_tags( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -846,7 +897,8 @@ async def get_policy_tag( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index 46a8a602..ffbb1f7f 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -16,17 +16,19 @@ # from collections import OrderedDict +from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: 
ignore from google.oauth2 import service_account # type: ignore @@ -132,6 +134,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> PolicyTagManagerTransport: + """Return the transport used by the client instance. + + Returns: + PolicyTagManagerTransport: The transport used by the client instance. + """ + return self._transport + @staticmethod def policy_tag_path( project: str, location: str, taxonomy: str, policy_tag: str, @@ -169,12 +180,71 @@ def parse_taxonomy_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) 
-> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, PolicyTagManagerTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, PolicyTagManagerTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the policy tag manager client. @@ -188,23 +258,26 @@ def __init__( transport (Union[str, ~.PolicyTagManagerTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -212,29 +285,47 @@ def __init__( creation failed for any reason. 
""" if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted values: never, auto, always" ) # Save or instantiate the transport. @@ -258,10 +349,9 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py index d14dd424..ee1e1daa 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py @@ -15,6 +15,7 @@ # limitations under the License. # +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore @@ -23,7 +24,6 @@ from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanager @@ -60,6 +60,7 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -80,20 +81,22 @@ def __init__( ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. 
The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -102,6 +105,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -109,7 +114,13 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -140,6 +151,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -200,19 +229,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. return self._grpc_channel @property diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py index d75d30bf..71d83118 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -15,10 +15,12 @@ # limitations under the License. 
# +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -102,6 +104,7 @@ def __init__( channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -123,14 +126,16 @@ def __init__( are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -145,6 +150,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -152,13 +159,24 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -178,6 +196,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) # Run the base constructor. super().__init__( @@ -198,13 +234,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. 
- if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py index 5e24730a..cfbd3082 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py @@ -47,11 +47,60 @@ class PolicyTagManagerSerializationAsyncClient: DEFAULT_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_MTLS_ENDPOINT + taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.taxonomy_path) + parse_taxonomy_path = staticmethod( + PolicyTagManagerSerializationClient.parse_taxonomy_path + ) + + common_billing_account_path = staticmethod( + PolicyTagManagerSerializationClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod( + PolicyTagManagerSerializationClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + PolicyTagManagerSerializationClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_organization_path + ) + + common_project_path = staticmethod( + PolicyTagManagerSerializationClient.common_project_path + ) + parse_common_project_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_project_path + ) + + common_location_path = staticmethod( + 
PolicyTagManagerSerializationClient.common_location_path + ) + parse_common_location_path = staticmethod( + PolicyTagManagerSerializationClient.parse_common_location_path + ) + from_service_account_file = ( PolicyTagManagerSerializationClient.from_service_account_file ) from_service_account_json = from_service_account_file + @property + def transport(self) -> PolicyTagManagerSerializationTransport: + """Return the transport used by the client instance. + + Returns: + PolicyTagManagerSerializationTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(PolicyTagManagerSerializationClient).get_transport_class, type(PolicyTagManagerSerializationClient), @@ -79,16 +128,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py index d5d77322..65a709e3 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ -16,17 +16,19 @@ # from collections import OrderedDict +from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore @@ -137,12 +139,96 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> PolicyTagManagerSerializationTransport: + """Return the transport used by the client 
instance. + + Returns: + PolicyTagManagerSerializationTransport: The transport used by the client instance. + """ + return self._transport + + @staticmethod + def taxonomy_path(project: str, location: str, taxonomy: str,) -> str: + """Return a fully-qualified taxonomy string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, + ) + + @staticmethod + def parse_taxonomy_path(path: str) -> Dict[str, str]: + """Parse a taxonomy path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def 
common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, PolicyTagManagerSerializationTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, PolicyTagManagerSerializationTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the policy tag manager serialization client. @@ -156,23 +242,26 @@ def __init__( transport (Union[str, ~.PolicyTagManagerSerializationTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -180,29 +269,47 @@ def __init__( creation failed for any reason. 
""" if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted values: never, auto, always" ) # Save or instantiate the transport. @@ -226,10 +333,9 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py index c724aa0b..84f435e9 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py @@ -15,6 +15,7 @@ # limitations under the License. # +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore @@ -23,7 +24,6 @@ from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization @@ -60,6 +60,7 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -80,20 +81,22 @@ def __init__( ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. 
If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -102,6 +105,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -109,7 +114,13 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -140,6 +151,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -200,19 +229,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. 
return self._grpc_channel @property diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py index 243cc091..a93a8572 100644 --- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py +++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -15,10 +15,12 @@ # limitations under the License. # +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -102,6 +104,7 @@ def __init__( channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -123,14 +126,16 @@ def __init__( are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. 
It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -145,6 +150,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -152,13 +159,24 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -178,6 +196,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) # Run the base constructor. super().__init__( @@ -198,13 +234,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/google/cloud/datacatalog_v1beta1/types/datacatalog.py b/google/cloud/datacatalog_v1beta1/types/datacatalog.py index 7bbbae2f..ee843cac 100644 --- a/google/cloud/datacatalog_v1beta1/types/datacatalog.py +++ b/google/cloud/datacatalog_v1beta1/types/datacatalog.py @@ -490,7 +490,7 @@ class Entry(proto.Message): Output only when Entry is of type in the EntryType enum. For entries with user_specified_type, this field is optional and defaults to an empty string. - type (~.datacatalog.EntryType): + type_ (~.datacatalog.EntryType): The type of the entry. Only used for Entries with types in the EntryType enum. 
@@ -555,7 +555,7 @@ class Entry(proto.Message): linked_resource = proto.Field(proto.STRING, number=9) - type = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",) + type_ = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",) user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type") @@ -988,7 +988,7 @@ class ListEntriesResponse(proto.Message): def raw_page(self): return self - entries = proto.RepeatedField(proto.MESSAGE, number=1, message=Entry,) + entries = proto.RepeatedField(proto.MESSAGE, number=1, message="Entry",) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/datacatalog_v1beta1/types/policytagmanager.py b/google/cloud/datacatalog_v1beta1/types/policytagmanager.py index 259be1b3..ad1694c3 100644 --- a/google/cloud/datacatalog_v1beta1/types/policytagmanager.py +++ b/google/cloud/datacatalog_v1beta1/types/policytagmanager.py @@ -152,7 +152,7 @@ class CreateTaxonomyRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - taxonomy = proto.Field(proto.MESSAGE, number=2, message=Taxonomy,) + taxonomy = proto.Field(proto.MESSAGE, number=2, message="Taxonomy",) class DeleteTaxonomyRequest(proto.Message): @@ -185,7 +185,7 @@ class UpdateTaxonomyRequest(proto.Message): to update. 
""" - taxonomy = proto.Field(proto.MESSAGE, number=1, message=Taxonomy,) + taxonomy = proto.Field(proto.MESSAGE, number=1, message="Taxonomy",) update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) @@ -231,7 +231,7 @@ class ListTaxonomiesResponse(proto.Message): def raw_page(self): return self - taxonomies = proto.RepeatedField(proto.MESSAGE, number=1, message=Taxonomy,) + taxonomies = proto.RepeatedField(proto.MESSAGE, number=1, message="Taxonomy",) next_page_token = proto.Field(proto.STRING, number=2) @@ -263,7 +263,7 @@ class CreatePolicyTagRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - policy_tag = proto.Field(proto.MESSAGE, number=2, message=PolicyTag,) + policy_tag = proto.Field(proto.MESSAGE, number=2, message="PolicyTag",) class DeletePolicyTagRequest(proto.Message): @@ -300,7 +300,7 @@ class UpdatePolicyTagRequest(proto.Message): to update. """ - policy_tag = proto.Field(proto.MESSAGE, number=1, message=PolicyTag,) + policy_tag = proto.Field(proto.MESSAGE, number=1, message="PolicyTag",) update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) @@ -347,7 +347,7 @@ class ListPolicyTagsResponse(proto.Message): def raw_page(self): return self - policy_tags = proto.RepeatedField(proto.MESSAGE, number=1, message=PolicyTag,) + policy_tags = proto.RepeatedField(proto.MESSAGE, number=1, message="PolicyTag",) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py b/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py index dd14cd86..2f76dbc7 100644 --- a/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py +++ b/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py @@ -116,7 +116,7 @@ class InlineSource(proto.Message): """ taxonomies = proto.RepeatedField( - proto.MESSAGE, number=1, message=SerializedTaxonomy, + proto.MESSAGE, number=1, 
message="SerializedTaxonomy", ) @@ -167,7 +167,7 @@ class ExportTaxonomiesResponse(proto.Message): """ taxonomies = proto.RepeatedField( - proto.MESSAGE, number=1, message=SerializedTaxonomy, + proto.MESSAGE, number=1, message="SerializedTaxonomy", ) diff --git a/google/cloud/datacatalog_v1beta1/types/schema.py b/google/cloud/datacatalog_v1beta1/types/schema.py index 55014c32..ebc56879 100644 --- a/google/cloud/datacatalog_v1beta1/types/schema.py +++ b/google/cloud/datacatalog_v1beta1/types/schema.py @@ -42,7 +42,7 @@ class ColumnSchema(proto.Message): Attributes: column (str): Required. Name of the column. - type (str): + type_ (str): Required. Type of the column. description (str): Optional. Description of the column. Default @@ -59,7 +59,7 @@ class ColumnSchema(proto.Message): column = proto.Field(proto.STRING, number=6) - type = proto.Field(proto.STRING, number=1) + type_ = proto.Field(proto.STRING, number=1) description = proto.Field(proto.STRING, number=2) diff --git a/google/cloud/datacatalog_v1beta1/types/tags.py b/google/cloud/datacatalog_v1beta1/types/tags.py index ddd5cf1f..098fd3c2 100644 --- a/google/cloud/datacatalog_v1beta1/types/tags.py +++ b/google/cloud/datacatalog_v1beta1/types/tags.py @@ -202,7 +202,7 @@ class TagTemplateField(proto.Message): display_name (str): The display name for this field. Defaults to an empty string. - type (~.tags.FieldType): + type_ (~.tags.FieldType): Required. The type of value this tag field can contain. 
is_required (bool): @@ -221,7 +221,7 @@ class TagTemplateField(proto.Message): display_name = proto.Field(proto.STRING, number=1) - type = proto.Field(proto.MESSAGE, number=2, message="FieldType",) + type_ = proto.Field(proto.MESSAGE, number=2, message="FieldType",) is_required = proto.Field(proto.BOOL, number=3) diff --git a/noxfile.py b/noxfile.py index 14e819be..5ba11445 100644 --- a/noxfile.py +++ b/noxfile.py @@ -28,7 +28,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -72,7 +72,9 @@ def default(session): # Install all test dependencies, then install this package in-place. session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov") + session.install( + "mock", "pytest", "pytest-cov", + ) session.install("-e", ".") # Run py.test against the unit tests. @@ -173,7 +175,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. + # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index ba55d7ce..b90eef00 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -39,6 +39,10 @@ # You can opt out from the test for specific Python versions. 'ignored_versions': ["2.7"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. 
Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string @@ -132,7 +136,10 @@ def _determine_local_import_names(start_dir): @nox.session def lint(session): - session.install("flake8", "flake8-import-order") + if not TEST_CONFIG['enforce_type_hints']: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ @@ -141,8 +148,18 @@ def lint(session): "." ] session.run("flake8", *args) +# +# Black +# +@nox.session +def blacken(session): + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + # # Sample Tests # @@ -201,6 +218,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index ff599eb2..21f6d2a2 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" ) # Work from the project root. cd $ROOT +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ > testing/test-env.sh gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ > testing/service-account.json gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ - > testing/client-secrets.json \ No newline at end of file + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/scripts/fixup_datacatalog_v1_keywords.py b/scripts/fixup_datacatalog_v1_keywords.py index 9ad22462..04befa38 100644 --- a/scripts/fixup_datacatalog_v1_keywords.py +++ b/scripts/fixup_datacatalog_v1_keywords.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC diff --git a/scripts/fixup_datacatalog_v1beta1_keywords.py b/scripts/fixup_datacatalog_v1beta1_keywords.py index e48632cc..bf43018a 100644 --- a/scripts/fixup_datacatalog_v1beta1_keywords.py +++ b/scripts/fixup_datacatalog_v1beta1_keywords.py @@ -1,3 +1,4 @@ +#! 
/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC diff --git a/synth.metadata b/synth.metadata index c17741cd..0f3327ea 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-datacatalog.git", - "sha": "2aac68cae6ac9f96841b9e314af72c0a052ee13f" + "remote": "https://github.com/googleapis/python-datacatalog.git", + "sha": "7f1b8ee4579c4306d9b6a56498a0755803b9eadf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "ddaa2026caf2eb00444a14b08500553824a7182a", - "internalRef": "329845759" + "sha": "754a312a0d01cfc1484d397872ff45e5565af0da", + "internalRef": "342758098" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d91dd8aac77f7a9c5506c238038a26fa4f9e361e" + "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "d91dd8aac77f7a9c5506c238038a26fa4f9e361e" + "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116" } } ], @@ -49,163 +49,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - 
".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/datacatalog_v1/services.rst", - "docs/datacatalog_v1/types.rst", - "docs/datacatalog_v1beta1/services.rst", - "docs/datacatalog_v1beta1/types.rst", - "docs/multiprocessing.rst", - "google/cloud/datacatalog/__init__.py", - "google/cloud/datacatalog/py.typed", - "google/cloud/datacatalog_v1/__init__.py", - "google/cloud/datacatalog_v1/proto/common.proto", - "google/cloud/datacatalog_v1/proto/datacatalog.proto", - "google/cloud/datacatalog_v1/proto/gcs_fileset_spec.proto", - "google/cloud/datacatalog_v1/proto/schema.proto", - "google/cloud/datacatalog_v1/proto/search.proto", - "google/cloud/datacatalog_v1/proto/table_spec.proto", - "google/cloud/datacatalog_v1/proto/tags.proto", - "google/cloud/datacatalog_v1/proto/timestamps.proto", - "google/cloud/datacatalog_v1/py.typed", - "google/cloud/datacatalog_v1/services/__init__.py", - "google/cloud/datacatalog_v1/services/data_catalog/__init__.py", - "google/cloud/datacatalog_v1/services/data_catalog/async_client.py", - "google/cloud/datacatalog_v1/services/data_catalog/client.py", - 
"google/cloud/datacatalog_v1/services/data_catalog/pagers.py", - "google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py", - "google/cloud/datacatalog_v1/services/data_catalog/transports/base.py", - "google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py", - "google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py", - "google/cloud/datacatalog_v1/types/__init__.py", - "google/cloud/datacatalog_v1/types/common.py", - "google/cloud/datacatalog_v1/types/datacatalog.py", - "google/cloud/datacatalog_v1/types/gcs_fileset_spec.py", - "google/cloud/datacatalog_v1/types/schema.py", - "google/cloud/datacatalog_v1/types/search.py", - "google/cloud/datacatalog_v1/types/table_spec.py", - "google/cloud/datacatalog_v1/types/tags.py", - "google/cloud/datacatalog_v1/types/timestamps.py", - "google/cloud/datacatalog_v1beta1/__init__.py", - "google/cloud/datacatalog_v1beta1/proto/common.proto", - "google/cloud/datacatalog_v1beta1/proto/datacatalog.proto", - "google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto", - "google/cloud/datacatalog_v1beta1/proto/policytagmanager.proto", - "google/cloud/datacatalog_v1beta1/proto/policytagmanagerserialization.proto", - "google/cloud/datacatalog_v1beta1/proto/schema.proto", - "google/cloud/datacatalog_v1beta1/proto/search.proto", - "google/cloud/datacatalog_v1beta1/proto/table_spec.proto", - "google/cloud/datacatalog_v1beta1/proto/tags.proto", - "google/cloud/datacatalog_v1beta1/proto/timestamps.proto", - "google/cloud/datacatalog_v1beta1/py.typed", - "google/cloud/datacatalog_v1beta1/services/__init__.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/client.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py", - 
"google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py", - "google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py", - "google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py", - "google/cloud/datacatalog_v1beta1/types/__init__.py", - "google/cloud/datacatalog_v1beta1/types/common.py", - "google/cloud/datacatalog_v1beta1/types/datacatalog.py", - "google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py", - "google/cloud/datacatalog_v1beta1/types/policytagmanager.py", - "google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py", - 
"google/cloud/datacatalog_v1beta1/types/schema.py", - "google/cloud/datacatalog_v1beta1/types/search.py", - "google/cloud/datacatalog_v1beta1/types/table_spec.py", - "google/cloud/datacatalog_v1beta1/types/tags.py", - "google/cloud/datacatalog_v1beta1/types/timestamps.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", - "samples/snippets/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_datacatalog_v1_keywords.py", - "scripts/fixup_datacatalog_v1beta1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/datacatalog_v1/__init__.py", - "tests/unit/gapic/datacatalog_v1/test_data_catalog.py", - "tests/unit/gapic/datacatalog_v1beta1/__init__.py", - "tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py", - "tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py", - "tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py" ] } \ No newline at end of file diff --git a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index fe695445..7851ae04 100644 --- a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -39,12 +39,9 @@ from google.cloud.datacatalog_v1.types import common from google.cloud.datacatalog_v1.types import datacatalog from google.cloud.datacatalog_v1.types import gcs_fileset_spec -from google.cloud.datacatalog_v1.types import gcs_fileset_spec as gcd_gcs_fileset_spec from google.cloud.datacatalog_v1.types import schema -from google.cloud.datacatalog_v1.types import schema as 
gcd_schema from google.cloud.datacatalog_v1.types import search from google.cloud.datacatalog_v1.types import table_spec -from google.cloud.datacatalog_v1.types import table_spec as gcd_table_spec from google.cloud.datacatalog_v1.types import tags from google.cloud.datacatalog_v1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore @@ -105,12 +102,12 @@ def test_data_catalog_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "datacatalog.googleapis.com:443" + assert client.transport._host == "datacatalog.googleapis.com:443" def test_data_catalog_client_get_transport_class(): @@ -164,15 +161,14 @@ def test_data_catalog_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -181,15 +177,14 @@ def test_data_catalog_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -198,95 +193,173 @@ def test_data_catalog_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"), + ( + DataCatalogAsyncClient, + transports.DataCatalogGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"), + ( + DataCatalogAsyncClient, + transports.DataCatalogGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient) +) +@mock.patch.object( + DataCatalogAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataCatalogAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_catalog_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds ): patched.return_value = None - client = client_class() + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + 
is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -313,8 +386,7 @@ def test_data_catalog_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -344,8 +416,7 @@ def test_data_catalog_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - 
api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -362,8 +433,7 @@ def test_data_catalog_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -381,7 +451,7 @@ def test_search_catalog( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.search_catalog), "__call__") as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.SearchCatalogResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], @@ -396,6 +466,7 @@ def test_search_catalog( assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchCatalogPager) assert response.next_page_token == "next_page_token_value" @@ -408,19 +479,19 @@ def test_search_catalog_from_dict(): @pytest.mark.asyncio -async def test_search_catalog_async(transport: str = "grpc_asyncio"): +async def test_search_catalog_async( + transport: str = "grpc_asyncio", request_type=datacatalog.SearchCatalogRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.SearchCatalogRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.search_catalog), "__call__" - ) as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.SearchCatalogResponse( @@ -435,7 +506,7 @@ async def test_search_catalog_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchCatalogAsyncPager) @@ -445,11 +516,16 @@ async def test_search_catalog_async(transport: str = "grpc_asyncio"): assert response.unreachable == ["unreachable_value"] +@pytest.mark.asyncio +async def test_search_catalog_async_from_dict(): + await test_search_catalog_async(request_type=dict) + + def test_search_catalog_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.search_catalog), "__call__") as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.SearchCatalogResponse() @@ -494,9 +570,7 @@ async def test_search_catalog_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.search_catalog), "__call__" - ) as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.SearchCatalogResponse() @@ -544,7 +618,7 @@ def test_search_catalog_pager(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.search_catalog), "__call__") as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.SearchCatalogResponse( @@ -579,7 +653,7 @@ def test_search_catalog_pages(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.search_catalog), "__call__") as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.SearchCatalogResponse( @@ -610,9 +684,7 @@ async def test_search_catalog_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.search_catalog), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -649,9 +721,7 @@ async def test_search_catalog_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.search_catalog), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -692,7 +762,7 @@ def test_create_entry_group( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup( @@ -710,6 +780,7 @@ def test_create_entry_group( assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" @@ -724,18 +795,20 @@ def test_create_entry_group_from_dict(): @pytest.mark.asyncio -async def test_create_entry_group_async(transport: str = "grpc_asyncio"): +async def test_create_entry_group_async( + transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryGroupRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateEntryGroupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -752,7 +825,7 @@ async def test_create_entry_group_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.EntryGroup) @@ -764,6 +837,11 @@ async def test_create_entry_group_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_create_entry_group_async_from_dict(): + await test_create_entry_group_async(request_type=dict) + + def test_create_entry_group_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -774,7 +852,7 @@ def test_create_entry_group_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() @@ -801,7 +879,7 @@ async def test_create_entry_group_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() @@ -824,7 +902,7 @@ def test_create_entry_group_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -869,7 +947,7 @@ async def test_create_entry_group_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.EntryGroup() @@ -924,7 +1002,7 @@ def test_get_entry_group( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup( name="name_value", @@ -941,6 +1019,7 @@ def test_get_entry_group( assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" @@ -955,19 +1034,19 @@ def test_get_entry_group_from_dict(): @pytest.mark.asyncio -async def test_get_entry_group_async(transport: str = "grpc_asyncio"): +async def test_get_entry_group_async( + transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryGroupRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.GetEntryGroupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry_group), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup( @@ -983,7 +1062,7 @@ async def test_get_entry_group_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) @@ -995,6 +1074,11 @@ async def test_get_entry_group_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_get_entry_group_async_from_dict(): + await test_get_entry_group_async(request_type=dict) + + def test_get_entry_group_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1004,7 +1088,7 @@ def test_get_entry_group_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: call.return_value = datacatalog.EntryGroup() client.get_entry_group(request) @@ -1029,9 +1113,7 @@ async def test_get_entry_group_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry_group), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) @@ -1052,7 +1134,7 @@ def test_get_entry_group_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -1090,9 +1172,7 @@ async def test_get_entry_group_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry_group), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -1142,7 +1222,7 @@ def test_update_entry_group( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup( @@ -1160,6 +1240,7 @@ def test_update_entry_group( assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" @@ -1174,18 +1255,20 @@ def test_update_entry_group_from_dict(): @pytest.mark.asyncio -async def test_update_entry_group_async(transport: str = "grpc_asyncio"): +async def test_update_entry_group_async( + transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryGroupRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datacatalog.UpdateEntryGroupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1202,7 +1285,7 @@ async def test_update_entry_group_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) @@ -1214,6 +1297,11 @@ async def test_update_entry_group_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_update_entry_group_async_from_dict(): + await test_update_entry_group_async(request_type=dict) + + def test_update_entry_group_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1224,7 +1312,7 @@ def test_update_entry_group_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() @@ -1253,7 +1341,7 @@ async def test_update_entry_group_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() @@ -1278,7 +1366,7 @@ def test_update_entry_group_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -1319,7 +1407,7 @@ async def test_update_entry_group_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -1371,7 +1459,7 @@ def test_delete_entry_group( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1393,18 +1481,20 @@ def test_delete_entry_group_from_dict(): @pytest.mark.asyncio -async def test_delete_entry_group_async(transport: str = "grpc_asyncio"): +async def test_delete_entry_group_async( + transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryGroupRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datacatalog.DeleteEntryGroupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1415,12 +1505,17 @@ async def test_delete_entry_group_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteEntryGroupRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_entry_group_async_from_dict(): + await test_delete_entry_group_async(request_type=dict) + + def test_delete_entry_group_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1431,7 +1526,7 @@ def test_delete_entry_group_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: call.return_value = None @@ -1458,7 +1553,7 @@ async def test_delete_entry_group_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1479,7 +1574,7 @@ def test_delete_entry_group_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1513,7 +1608,7 @@ async def test_delete_entry_group_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1556,7 +1651,7 @@ def test_list_entry_groups( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntryGroupsResponse( @@ -1572,6 +1667,7 @@ def test_list_entry_groups( assert args[0] == datacatalog.ListEntryGroupsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntryGroupsPager) assert response.next_page_token == "next_page_token_value" @@ -1582,18 +1678,20 @@ def test_list_entry_groups_from_dict(): @pytest.mark.asyncio -async def test_list_entry_groups_async(transport: str = "grpc_asyncio"): +async def test_list_entry_groups_async( + transport: str = "grpc_asyncio", request_type=datacatalog.ListEntryGroupsRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.ListEntryGroupsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1608,7 +1706,7 @@ async def test_list_entry_groups_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.ListEntryGroupsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListEntryGroupsAsyncPager) @@ -1616,6 +1714,11 @@ async def test_list_entry_groups_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_entry_groups_async_from_dict(): + await test_list_entry_groups_async(request_type=dict) + + def test_list_entry_groups_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1626,7 +1729,7 @@ def test_list_entry_groups_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: call.return_value = datacatalog.ListEntryGroupsResponse() @@ -1653,7 +1756,7 @@ async def test_list_entry_groups_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntryGroupsResponse() @@ -1676,7 +1779,7 @@ def test_list_entry_groups_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntryGroupsResponse() @@ -1710,7 +1813,7 @@ async def test_list_entry_groups_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntryGroupsResponse() @@ -1747,7 +1850,7 @@ def test_list_entry_groups_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1789,7 +1892,7 @@ def test_list_entry_groups_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1823,7 +1926,7 @@ async def test_list_entry_groups_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entry_groups), + type(client.transport.list_entry_groups), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1864,7 +1967,7 @@ async def test_list_entry_groups_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.list_entry_groups), + type(client.transport.list_entry_groups), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1908,14 +2011,14 @@ def test_create_entry( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_entry), "__call__") as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", - type=datacatalog.EntryType.TABLE, + type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] @@ -1931,6 +2034,7 @@ def test_create_entry( assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) assert response.name == "name_value" @@ -1947,19 +2051,19 @@ def test_create_entry_from_dict(): @pytest.mark.asyncio -async def test_create_entry_async(transport: str = "grpc_asyncio"): +async def test_create_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.create_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.Entry( @@ -1976,7 +2080,7 @@ async def test_create_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) @@ -1990,6 +2094,11 @@ async def test_create_entry_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_create_entry_async_from_dict(): + await test_create_entry_async(request_type=dict) + + def test_create_entry_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1999,7 +2108,7 @@ def test_create_entry_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_entry), "__call__") as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: call.return_value = datacatalog.Entry() client.create_entry(request) @@ -2024,9 +2133,7 @@ async def test_create_entry_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.create_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) await client.create_entry(request) @@ -2045,7 +2152,7 @@ def test_create_entry_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_entry), "__call__") as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2088,9 +2195,7 @@ async def test_create_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2142,14 +2247,14 @@ def test_update_entry( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_entry), "__call__") as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", - type=datacatalog.EntryType.TABLE, + type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] @@ -2165,6 +2270,7 @@ def test_update_entry( assert args[0] == datacatalog.UpdateEntryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) assert response.name == "name_value" @@ -2181,19 +2287,19 @@ def test_update_entry_from_dict(): @pytest.mark.asyncio -async def test_update_entry_async(transport: str = "grpc_asyncio"): +async def test_update_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.UpdateEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.Entry( @@ -2210,7 +2316,7 @@ async def test_update_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateEntryRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.Entry) @@ -2224,6 +2330,11 @@ async def test_update_entry_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_update_entry_async_from_dict(): + await test_update_entry_async(request_type=dict) + + def test_update_entry_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -2233,7 +2344,7 @@ def test_update_entry_field_headers(): request.entry.name = "entry.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_entry), "__call__") as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = datacatalog.Entry() client.update_entry(request) @@ -2258,9 +2369,7 @@ async def test_update_entry_field_headers_async(): request.entry.name = "entry.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) await client.update_entry(request) @@ -2279,7 +2388,7 @@ def test_update_entry_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_entry), "__call__") as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2318,9 +2427,7 @@ async def test_update_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.update_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2368,7 +2475,7 @@ def test_delete_entry( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_entry), "__call__") as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2389,19 +2496,19 @@ def test_delete_entry_from_dict(): @pytest.mark.asyncio -async def test_delete_entry_async(transport: str = "grpc_asyncio"): +async def test_delete_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.DeleteEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2411,12 +2518,17 @@ async def test_delete_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteEntryRequest() # Establish that the response is the type that we expect. 
assert response is None +@pytest.mark.asyncio +async def test_delete_entry_async_from_dict(): + await test_delete_entry_async(request_type=dict) + + def test_delete_entry_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -2426,7 +2538,7 @@ def test_delete_entry_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_entry), "__call__") as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: call.return_value = None client.delete_entry(request) @@ -2451,9 +2563,7 @@ async def test_delete_entry_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_entry(request) @@ -2472,7 +2582,7 @@ def test_delete_entry_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_entry), "__call__") as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2504,9 +2614,7 @@ async def test_delete_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -2545,14 +2653,14 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_entry), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", - type=datacatalog.EntryType.TABLE, + type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] @@ -2568,6 +2676,7 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) assert response.name == "name_value" @@ -2584,19 +2693,19 @@ def test_get_entry_from_dict(): @pytest.mark.asyncio -async def test_get_entry_async(transport: str = "grpc_asyncio"): +async def test_get_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.GetEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.Entry( @@ -2613,7 +2722,7 @@ async def test_get_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) @@ -2627,6 +2736,11 @@ async def test_get_entry_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_get_entry_async_from_dict(): + await test_get_entry_async(request_type=dict) + + def test_get_entry_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -2636,7 +2750,7 @@ def test_get_entry_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_entry), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: call.return_value = datacatalog.Entry() client.get_entry(request) @@ -2661,9 +2775,7 @@ async def test_get_entry_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) await client.get_entry(request) @@ -2682,7 +2794,7 @@ def test_get_entry_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2714,9 +2826,7 @@ async def test_get_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2757,14 +2867,14 @@ def test_lookup_entry( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.lookup_entry), "__call__") as call: + with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", - type=datacatalog.EntryType.TABLE, + type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] @@ -2780,6 +2890,7 @@ def test_lookup_entry( assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, datacatalog.Entry) assert response.name == "name_value" @@ -2796,19 +2907,19 @@ def test_lookup_entry_from_dict(): @pytest.mark.asyncio -async def test_lookup_entry_async(transport: str = "grpc_asyncio"): +async def test_lookup_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.LookupEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.LookupEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.lookup_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.Entry( @@ -2825,7 +2936,7 @@ async def test_lookup_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) @@ -2839,6 +2950,11 @@ async def test_lookup_entry_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_lookup_entry_async_from_dict(): + await test_lookup_entry_async(request_type=dict) + + def test_list_entries( transport: str = "grpc", request_type=datacatalog.ListEntriesRequest ): @@ -2851,7 +2967,7 @@ def test_list_entries( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse( next_page_token="next_page_token_value", @@ -2866,6 +2982,7 @@ def test_list_entries( assert args[0] == datacatalog.ListEntriesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesPager) assert response.next_page_token == "next_page_token_value" @@ -2876,19 +2993,19 @@ def test_list_entries_from_dict(): @pytest.mark.asyncio -async def test_list_entries_async(transport: str = "grpc_asyncio"): +async def test_list_entries_async( + transport: str = "grpc_asyncio", request_type=datacatalog.ListEntriesRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.ListEntriesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_entries), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse(next_page_token="next_page_token_value",) @@ -2900,7 +3017,7 @@ async def test_list_entries_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.ListEntriesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListEntriesAsyncPager) @@ -2908,6 +3025,11 @@ async def test_list_entries_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_entries_async_from_dict(): + await test_list_entries_async(request_type=dict) + + def test_list_entries_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -2917,7 +3039,7 @@ def test_list_entries_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: call.return_value = datacatalog.ListEntriesResponse() client.list_entries(request) @@ -2942,9 +3064,7 @@ async def test_list_entries_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_entries), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse() ) @@ -2965,7 +3085,7 @@ def test_list_entries_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse() @@ -2997,9 +3117,7 @@ async def test_list_entries_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.list_entries), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse() @@ -3034,7 +3152,7 @@ def test_list_entries_pager(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListEntriesResponse( @@ -3072,7 +3190,7 @@ def test_list_entries_pages(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListEntriesResponse( @@ -3103,9 +3221,7 @@ async def test_list_entries_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entries), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -3142,9 +3258,7 @@ async def test_list_entries_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entries), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -3185,7 +3299,7 @@ def test_create_tag_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( @@ -3201,6 +3315,7 @@ def test_create_tag_template( assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" @@ -3213,18 +3328,20 @@ def test_create_tag_template_from_dict(): @pytest.mark.asyncio -async def test_create_tag_template_async(transport: str = "grpc_asyncio"): +async def test_create_tag_template_async( + transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagTemplateRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateTagTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3237,7 +3354,7 @@ async def test_create_tag_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.TagTemplate) @@ -3247,6 +3364,11 @@ async def test_create_tag_template_async(transport: str = "grpc_asyncio"): assert response.display_name == "display_name_value" +@pytest.mark.asyncio +async def test_create_tag_template_async_from_dict(): + await test_create_tag_template_async(request_type=dict) + + def test_create_tag_template_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -3257,7 +3379,7 @@ def test_create_tag_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() @@ -3284,7 +3406,7 @@ async def test_create_tag_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) @@ -3305,7 +3427,7 @@ def test_create_tag_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3350,7 +3472,7 @@ async def test_create_tag_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = tags.TagTemplate() @@ -3403,9 +3525,7 @@ def test_get_tag_template( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", @@ -3420,6 +3540,7 @@ def test_get_tag_template( assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" @@ -3432,19 +3553,19 @@ def test_get_tag_template_from_dict(): @pytest.mark.asyncio -async def test_get_tag_template_async(transport: str = "grpc_asyncio"): +async def test_get_tag_template_async( + transport: str = "grpc_asyncio", request_type=datacatalog.GetTagTemplateRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.GetTagTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) @@ -3456,7 +3577,7 @@ async def test_get_tag_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) @@ -3466,6 +3587,11 @@ async def test_get_tag_template_async(transport: str = "grpc_asyncio"): assert response.display_name == "display_name_value" +@pytest.mark.asyncio +async def test_get_tag_template_async_from_dict(): + await test_get_tag_template_async(request_type=dict) + + def test_get_tag_template_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -3475,9 +3601,7 @@ def test_get_tag_template_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = tags.TagTemplate() client.get_tag_template(request) @@ -3502,9 +3626,7 @@ async def test_get_tag_template_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) await client.get_tag_template(request) @@ -3523,9 +3645,7 @@ def test_get_tag_template_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3557,9 +3677,7 @@ async def test_get_tag_template_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3601,7 +3719,7 @@ def test_update_tag_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( @@ -3617,6 +3735,7 @@ def test_update_tag_template( assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" @@ -3629,18 +3748,20 @@ def test_update_tag_template_from_dict(): @pytest.mark.asyncio -async def test_update_tag_template_async(transport: str = "grpc_asyncio"): +async def test_update_tag_template_async( + transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagTemplateRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datacatalog.UpdateTagTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3653,7 +3774,7 @@ async def test_update_tag_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) @@ -3663,6 +3784,11 @@ async def test_update_tag_template_async(transport: str = "grpc_asyncio"): assert response.display_name == "display_name_value" +@pytest.mark.asyncio +async def test_update_tag_template_async_from_dict(): + await test_update_tag_template_async(request_type=dict) + + def test_update_tag_template_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -3673,7 +3799,7 @@ def test_update_tag_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() @@ -3703,7 +3829,7 @@ async def test_update_tag_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) @@ -3727,7 +3853,7 @@ def test_update_tag_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3768,7 +3894,7 @@ async def test_update_tag_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3818,7 +3944,7 @@ def test_delete_tag_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3840,18 +3966,20 @@ def test_delete_tag_template_from_dict(): @pytest.mark.asyncio -async def test_delete_tag_template_async(transport: str = "grpc_asyncio"): +async def test_delete_tag_template_async( + transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagTemplateRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datacatalog.DeleteTagTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -3862,12 +3990,17 @@ async def test_delete_tag_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteTagTemplateRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_tag_template_async_from_dict(): + await test_delete_tag_template_async(request_type=dict) + + def test_delete_tag_template_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -3878,7 +4011,7 @@ def test_delete_tag_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = None @@ -3905,7 +4038,7 @@ async def test_delete_tag_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -3926,7 +4059,7 @@ def test_delete_tag_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3964,7 +4097,7 @@ async def test_delete_tag_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4009,7 +4142,7 @@ def test_create_tag_template_field( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( @@ -4028,6 +4161,7 @@ def test_create_tag_template_field( assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" @@ -4044,18 +4178,21 @@ def test_create_tag_template_field_from_dict(): @pytest.mark.asyncio -async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"): +async def test_create_tag_template_field_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.CreateTagTemplateFieldRequest, +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateTagTemplateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4073,7 +4210,7 @@ async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) @@ -4087,6 +4224,11 @@ async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"): assert response.order == 540 +@pytest.mark.asyncio +async def test_create_tag_template_field_async_from_dict(): + await test_create_tag_template_field_async(request_type=dict) + + def test_create_tag_template_field_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4097,7 +4239,7 @@ def test_create_tag_template_field_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() @@ -4124,7 +4266,7 @@ async def test_create_tag_template_field_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() @@ -4147,7 +4289,7 @@ def test_create_tag_template_field_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4192,7 +4334,7 @@ async def test_create_tag_template_field_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4248,7 +4390,7 @@ def test_update_tag_template_field( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( @@ -4267,6 +4409,7 @@ def test_update_tag_template_field( assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" @@ -4283,18 +4426,21 @@ def test_update_tag_template_field_from_dict(): @pytest.mark.asyncio -async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"): +async def test_update_tag_template_field_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.UpdateTagTemplateFieldRequest, +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datacatalog.UpdateTagTemplateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4312,7 +4458,7 @@ async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) @@ -4326,6 +4472,11 @@ async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"): assert response.order == 540 +@pytest.mark.asyncio +async def test_update_tag_template_field_async_from_dict(): + await test_update_tag_template_field_async(request_type=dict) + + def test_update_tag_template_field_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4336,7 +4487,7 @@ def test_update_tag_template_field_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() @@ -4363,7 +4514,7 @@ async def test_update_tag_template_field_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() @@ -4386,7 +4537,7 @@ def test_update_tag_template_field_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4431,7 +4582,7 @@ async def test_update_tag_template_field_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4487,7 +4638,7 @@ def test_rename_tag_template_field( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( @@ -4506,6 +4657,7 @@ def test_rename_tag_template_field( assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" @@ -4522,18 +4674,21 @@ def test_rename_tag_template_field_from_dict(): @pytest.mark.asyncio -async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"): +async def test_rename_tag_template_field_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.RenameTagTemplateFieldRequest, +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.RenameTagTemplateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4551,7 +4706,7 @@ async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.TagTemplateField) @@ -4565,6 +4720,11 @@ async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"): assert response.order == 540 +@pytest.mark.asyncio +async def test_rename_tag_template_field_async_from_dict(): + await test_rename_tag_template_field_async(request_type=dict) + + def test_rename_tag_template_field_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4575,7 +4735,7 @@ def test_rename_tag_template_field_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() @@ -4602,7 +4762,7 @@ async def test_rename_tag_template_field_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() @@ -4625,7 +4785,7 @@ def test_rename_tag_template_field_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4666,7 +4826,7 @@ async def test_rename_tag_template_field_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = tags.TagTemplateField() @@ -4718,7 +4878,7 @@ def test_delete_tag_template_field( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4740,18 +4900,21 @@ def test_delete_tag_template_field_from_dict(): @pytest.mark.asyncio -async def test_delete_tag_template_field_async(transport: str = "grpc_asyncio"): +async def test_delete_tag_template_field_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.DeleteTagTemplateFieldRequest, +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.DeleteTagTemplateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -4762,12 +4925,17 @@ async def test_delete_tag_template_field_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() # Establish that the response is the type that we expect. 
assert response is None +@pytest.mark.asyncio +async def test_delete_tag_template_field_async_from_dict(): + await test_delete_tag_template_field_async(request_type=dict) + + def test_delete_tag_template_field_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4778,7 +4946,7 @@ def test_delete_tag_template_field_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = None @@ -4805,7 +4973,7 @@ async def test_delete_tag_template_field_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -4826,7 +4994,7 @@ def test_delete_tag_template_field_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4864,7 +5032,7 @@ async def test_delete_tag_template_field_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4908,7 +5076,7 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag( name="name_value", @@ -4926,6 +5094,7 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) assert response.name == "name_value" @@ -4940,19 +5109,19 @@ def test_create_tag_from_dict(): @pytest.mark.asyncio -async def test_create_tag_async(transport: str = "grpc_asyncio"): +async def test_create_tag_async( + transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.Tag( @@ -4968,7 +5137,7 @@ async def test_create_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.Tag) @@ -4980,6 +5149,11 @@ async def test_create_tag_async(transport: str = "grpc_asyncio"): assert response.template_display_name == "template_display_name_value" +@pytest.mark.asyncio +async def test_create_tag_async_from_dict(): + await test_create_tag_async(request_type=dict) + + def test_create_tag_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4989,7 +5163,7 @@ def test_create_tag_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = tags.Tag() client.create_tag(request) @@ -5014,9 +5188,7 @@ async def test_create_tag_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) await client.create_tag(request) @@ -5035,7 +5207,7 @@ def test_create_tag_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() @@ -5073,9 +5245,7 @@ async def test_create_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.create_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() @@ -5120,7 +5290,7 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag( name="name_value", @@ -5138,6 +5308,7 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) assert response.name == "name_value" @@ -5152,19 +5323,19 @@ def test_update_tag_from_dict(): @pytest.mark.asyncio -async def test_update_tag_async(transport: str = "grpc_asyncio"): +async def test_update_tag_async( + transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.UpdateTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.Tag( @@ -5180,7 +5351,7 @@ async def test_update_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.Tag) @@ -5192,6 +5363,11 @@ async def test_update_tag_async(transport: str = "grpc_asyncio"): assert response.template_display_name == "template_display_name_value" +@pytest.mark.asyncio +async def test_update_tag_async_from_dict(): + await test_update_tag_async(request_type=dict) + + def test_update_tag_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -5201,7 +5377,7 @@ def test_update_tag_field_headers(): request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = tags.Tag() client.update_tag(request) @@ -5226,9 +5402,7 @@ async def test_update_tag_field_headers_async(): request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) await client.update_tag(request) @@ -5247,7 +5421,7 @@ def test_update_tag_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() @@ -5286,9 +5460,7 @@ async def test_update_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() @@ -5334,7 +5506,7 @@ def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagR request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5355,19 +5527,19 @@ def test_delete_tag_from_dict(): @pytest.mark.asyncio -async def test_delete_tag_async(transport: str = "grpc_asyncio"): +async def test_delete_tag_async( + transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.DeleteTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.delete_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -5377,12 +5549,17 @@ async def test_delete_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteTagRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_tag_async_from_dict(): + await test_delete_tag_async(request_type=dict) + + def test_delete_tag_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -5392,7 +5569,7 @@ def test_delete_tag_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = None client.delete_tag(request) @@ -5417,9 +5594,7 @@ async def test_delete_tag_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_tag(request) @@ -5438,7 +5613,7 @@ def test_delete_tag_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5470,9 +5645,7 @@ async def test_delete_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5511,7 +5684,7 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse( next_page_token="next_page_token_value", @@ -5526,6 +5699,7 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTagsPager) assert response.next_page_token == "next_page_token_value" @@ -5536,19 +5710,19 @@ def test_list_tags_from_dict(): @pytest.mark.asyncio -async def test_list_tags_async(transport: str = "grpc_asyncio"): +async def test_list_tags_async( + transport: str = "grpc_asyncio", request_type=datacatalog.ListTagsRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.ListTagsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse(next_page_token="next_page_token_value",) @@ -5560,7 +5734,7 @@ async def test_list_tags_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTagsAsyncPager) @@ -5568,6 +5742,11 @@ async def test_list_tags_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_tags_async_from_dict(): + await test_list_tags_async(request_type=dict) + + def test_list_tags_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -5577,7 +5756,7 @@ def test_list_tags_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value = datacatalog.ListTagsResponse() client.list_tags(request) @@ -5602,9 +5781,7 @@ async def test_list_tags_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse() ) @@ -5625,7 +5802,7 @@ def test_list_tags_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse() @@ -5657,9 +5834,7 @@ async def test_list_tags_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse() @@ -5694,7 +5869,7 @@ def test_list_tags_pager(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListTagsResponse( @@ -5723,7 +5898,7 @@ def test_list_tags_pages(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListTagsResponse( @@ -5745,9 +5920,7 @@ async def test_list_tags_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_tags), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -5775,9 +5948,7 @@ async def test_list_tags_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_tags), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -5808,7 +5979,7 @@ def test_set_iam_policy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -5821,6 +5992,7 @@ def test_set_iam_policy( assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) assert response.version == 774 @@ -5833,19 +6005,19 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -5857,7 +6029,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, policy.Policy) @@ -5867,6 +6039,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + def test_set_iam_policy_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -5876,7 +6053,7 @@ def test_set_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.set_iam_policy(request) @@ -5901,9 +6078,7 @@ async def test_set_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.set_iam_policy(request) @@ -5918,10 +6093,10 @@ async def test_set_iam_policy_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_set_iam_policy_from_dict(): +def test_set_iam_policy_from_dict_foreign(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy() @@ -5938,7 +6113,7 @@ def test_set_iam_policy_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -5970,9 +6145,7 @@ async def test_set_iam_policy_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -6013,7 +6186,7 @@ def test_get_iam_policy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -6026,6 +6199,7 @@ def test_get_iam_policy( assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) assert response.version == 774 @@ -6038,19 +6212,19 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -6062,7 +6236,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policy.Policy) @@ -6072,6 +6246,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + def test_get_iam_policy_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -6081,7 +6260,7 @@ def test_get_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.get_iam_policy(request) @@ -6106,9 +6285,7 @@ async def test_get_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.get_iam_policy(request) @@ -6123,10 +6300,10 @@ async def test_get_iam_policy_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_get_iam_policy_from_dict(): +def test_get_iam_policy_from_dict_foreign(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -6143,7 +6320,7 @@ def test_get_iam_policy_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy() @@ -6175,9 +6352,7 @@ async def test_get_iam_policy_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -6219,7 +6394,7 @@ def test_test_iam_permissions( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse( @@ -6235,6 +6410,7 @@ def test_test_iam_permissions( assert args[0] == iam_policy.TestIamPermissionsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -6245,18 +6421,20 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -6269,7 +6447,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.TestIamPermissionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, iam_policy.TestIamPermissionsResponse) @@ -6277,6 +6455,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert response.permissions == ["permissions_value"] +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + def test_test_iam_permissions_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -6287,7 +6470,7 @@ def test_test_iam_permissions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = iam_policy.TestIamPermissionsResponse() @@ -6314,7 +6497,7 @@ async def test_test_iam_permissions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy.TestIamPermissionsResponse() @@ -6332,11 +6515,11 @@ async def test_test_iam_permissions_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_test_iam_permissions_from_dict(): +def test_test_iam_permissions_from_dict_foreign(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse() @@ -6386,7 +6569,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = DataCatalogClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -6404,10 +6587,22 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.DataCatalogGrpcTransport,) + assert isinstance(client.transport, transports.DataCatalogGrpcTransport,) def test_data_catalog_base_transport_error(): @@ -6484,6 +6679,17 @@ def test_data_catalog_base_transport_with_credentials_file(): ) +def test_data_catalog_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DataCatalogTransport() + adc.assert_called_once() + + def test_data_catalog_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, "default") as adc: @@ -6516,7 +6722,7 @@ def test_data_catalog_host_no_port(): api_endpoint="datacatalog.googleapis.com" ), ) - assert client._transport._host == "datacatalog.googleapis.com:443" + assert client.transport._host == "datacatalog.googleapis.com:443" def test_data_catalog_host_with_port(): @@ -6526,218 +6732,111 @@ def test_data_catalog_host_with_port(): api_endpoint="datacatalog.googleapis.com:8000" ), ) - assert client._transport._host == "datacatalog.googleapis.com:8000" + assert client.transport._host == "datacatalog.googleapis.com:8000" def test_data_catalog_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. 
transport = transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None def test_data_catalog_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.DataCatalogGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_data_catalog_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.DataCatalogGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_data_catalog_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel +def test_data_catalog_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials 
== mock_ssl_cred @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. +def test_data_catalog_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - mock_cred = mock.Mock() - transport = transports.DataCatalogGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_tag_path(): - project = "squid" - location = "clam" - entry_group = "whelk" - entry = "octopus" - tag = "oyster" - - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( - project=project, - location=location, - entry_group=entry_group, - entry=entry, - tag=tag, - ) - actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) - assert expected == actual - - -def test_parse_tag_path(): - 
expected = { - "project": "nudibranch", - "location": "cuttlefish", - "entry_group": "mussel", - "entry": "winkle", - "tag": "nautilus", - } - path = DataCatalogClient.tag_path(**expected) + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_path(path) - assert expected == actual + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel def test_entry_path(): @@ -6768,9 +6867,9 @@ def test_parse_entry_path(): def test_entry_group_path(): - project = "squid" - location = "clam" - entry_group = "whelk" + project = "winkle" + location = "nautilus" + entry_group = "scallop" expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( project=project, location=location, entry_group=entry_group, @@ -6781,9 +6880,9 @@ def test_entry_group_path(): def test_parse_entry_group_path(): expected = { - "project": "octopus", - "location": "oyster", - "entry_group": "nudibranch", + "project": "abalone", + "location": "squid", + "entry_group": "clam", } path = DataCatalogClient.entry_group_path(**expected) @@ -6792,6 +6891,39 @@ def test_parse_entry_group_path(): assert expected == actual +def test_tag_path(): + project = "whelk" + location = "octopus" + entry_group = "oyster" + entry = "nudibranch" + tag = "cuttlefish" + + expected = 
"projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( + project=project, + location=location, + entry_group=entry_group, + entry=entry, + tag=tag, + ) + actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) + assert expected == actual + + +def test_parse_tag_path(): + expected = { + "project": "mussel", + "location": "winkle", + "entry_group": "nautilus", + "entry": "scallop", + "tag": "abalone", + } + path = DataCatalogClient.tag_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_path(path) + assert expected == actual + + def test_tag_template_path(): project = "squid" location = "clam" @@ -6818,10 +6950,10 @@ def test_parse_tag_template_path(): def test_tag_template_field_path(): - project = "squid" - location = "clam" - tag_template = "whelk" - field = "octopus" + project = "cuttlefish" + location = "mussel" + tag_template = "winkle" + field = "nautilus" expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format( project=project, location=location, tag_template=tag_template, field=field, @@ -6834,10 +6966,10 @@ def test_tag_template_field_path(): def test_parse_tag_template_field_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "tag_template": "cuttlefish", - "field": "mussel", + "project": "scallop", + "location": "abalone", + "tag_template": "squid", + "field": "clam", } path = DataCatalogClient.tag_template_field_path(**expected) @@ -6846,6 +6978,107 @@ def test_parse_tag_template_field_path(): assert expected == actual +def test_common_billing_account_path(): + billing_account = "whelk" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DataCatalogClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + 
"billing_account": "octopus", + } + path = DataCatalogClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + + expected = "folders/{folder}".format(folder=folder,) + actual = DataCatalogClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = DataCatalogClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + + expected = "organizations/{organization}".format(organization=organization,) + actual = DataCatalogClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = DataCatalogClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + + expected = "projects/{project}".format(project=project,) + actual = DataCatalogClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = DataCatalogClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = DataCatalogClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = DataCatalogClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index cf831bf6..57088c0c 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -41,14 +41,9 @@ from google.cloud.datacatalog_v1beta1.types import common from google.cloud.datacatalog_v1beta1.types import datacatalog from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec -from google.cloud.datacatalog_v1beta1.types import ( - gcs_fileset_spec as gcd_gcs_fileset_spec, -) from google.cloud.datacatalog_v1beta1.types import schema -from google.cloud.datacatalog_v1beta1.types import schema as gcd_schema from google.cloud.datacatalog_v1beta1.types import search from google.cloud.datacatalog_v1beta1.types import table_spec -from google.cloud.datacatalog_v1beta1.types import table_spec as gcd_table_spec from google.cloud.datacatalog_v1beta1.types import tags from google.cloud.datacatalog_v1beta1.types import timestamps from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore @@ -109,12 +104,12 @@ def test_data_catalog_client_from_service_account_file(client_class): ) as 
factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "datacatalog.googleapis.com:443" + assert client.transport._host == "datacatalog.googleapis.com:443" def test_data_catalog_client_get_transport_class(): @@ -168,15 +163,14 @@ def test_data_catalog_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -185,15 +179,14 @@ def test_data_catalog_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -202,95 +195,173 @@ def test_data_catalog_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"), + ( + DataCatalogAsyncClient, + transports.DataCatalogGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"), + ( + DataCatalogAsyncClient, + transports.DataCatalogGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient) +) +@mock.patch.object( + DataCatalogAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataCatalogAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_catalog_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds ): patched.return_value = None - client = client_class() + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + 
is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -317,8 +388,7 @@ def test_data_catalog_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -348,8 +418,7 @@ def test_data_catalog_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - 
api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -366,8 +435,7 @@ def test_data_catalog_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -385,7 +453,7 @@ def test_search_catalog( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.search_catalog), "__call__") as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.SearchCatalogResponse( next_page_token="next_page_token_value", @@ -400,6 +468,7 @@ def test_search_catalog( assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchCatalogPager) assert response.next_page_token == "next_page_token_value" @@ -410,19 +479,19 @@ def test_search_catalog_from_dict(): @pytest.mark.asyncio -async def test_search_catalog_async(transport: str = "grpc_asyncio"): +async def test_search_catalog_async( + transport: str = "grpc_asyncio", request_type=datacatalog.SearchCatalogRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.SearchCatalogRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.search_catalog), "__call__" - ) as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.SearchCatalogResponse(next_page_token="next_page_token_value",) @@ -434,7 +503,7 @@ async def test_search_catalog_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.SearchCatalogRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchCatalogAsyncPager) @@ -442,11 +511,16 @@ async def test_search_catalog_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_search_catalog_async_from_dict(): + await test_search_catalog_async(request_type=dict) + + def test_search_catalog_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.search_catalog), "__call__") as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.SearchCatalogResponse() @@ -491,9 +565,7 @@ async def test_search_catalog_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.search_catalog), "__call__" - ) as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.SearchCatalogResponse() @@ -541,7 +613,7 @@ def test_search_catalog_pager(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.search_catalog), "__call__") as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.SearchCatalogResponse( @@ -576,7 +648,7 @@ def test_search_catalog_pages(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.search_catalog), "__call__") as call: + with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.SearchCatalogResponse( @@ -607,9 +679,7 @@ async def test_search_catalog_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.search_catalog), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -646,9 +716,7 @@ async def test_search_catalog_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.search_catalog), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -689,7 +757,7 @@ def test_create_entry_group( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup( @@ -707,6 +775,7 @@ def test_create_entry_group( assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" @@ -721,18 +790,20 @@ def test_create_entry_group_from_dict(): @pytest.mark.asyncio -async def test_create_entry_group_async(transport: str = "grpc_asyncio"): +async def test_create_entry_group_async( + transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryGroupRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateEntryGroupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -749,7 +820,7 @@ async def test_create_entry_group_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateEntryGroupRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.EntryGroup) @@ -761,6 +832,11 @@ async def test_create_entry_group_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_create_entry_group_async_from_dict(): + await test_create_entry_group_async(request_type=dict) + + def test_create_entry_group_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -771,7 +847,7 @@ def test_create_entry_group_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() @@ -798,7 +874,7 @@ async def test_create_entry_group_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() @@ -821,7 +897,7 @@ def test_create_entry_group_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -866,7 +942,7 @@ async def test_create_entry_group_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_entry_group), "__call__" + type(client.transport.create_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.EntryGroup() @@ -922,7 +998,7 @@ def test_update_entry_group( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup( @@ -940,6 +1016,7 @@ def test_update_entry_group( assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" @@ -954,18 +1031,20 @@ def test_update_entry_group_from_dict(): @pytest.mark.asyncio -async def test_update_entry_group_async(transport: str = "grpc_asyncio"): +async def test_update_entry_group_async( + transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryGroupRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.UpdateEntryGroupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -982,7 +1061,7 @@ async def test_update_entry_group_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateEntryGroupRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, datacatalog.EntryGroup) @@ -994,6 +1073,11 @@ async def test_update_entry_group_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_update_entry_group_async_from_dict(): + await test_update_entry_group_async(request_type=dict) + + def test_update_entry_group_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1004,7 +1088,7 @@ def test_update_entry_group_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: call.return_value = datacatalog.EntryGroup() @@ -1033,7 +1117,7 @@ async def test_update_entry_group_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() @@ -1058,7 +1142,7 @@ def test_update_entry_group_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -1099,7 +1183,7 @@ async def test_update_entry_group_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_entry_group), "__call__" + type(client.transport.update_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.EntryGroup() @@ -1150,7 +1234,7 @@ def test_get_entry_group( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup( name="name_value", @@ -1167,6 +1251,7 @@ def test_get_entry_group( assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) assert response.name == "name_value" @@ -1181,19 +1266,19 @@ def test_get_entry_group_from_dict(): @pytest.mark.asyncio -async def test_get_entry_group_async(transport: str = "grpc_asyncio"): +async def test_get_entry_group_async( + transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryGroupRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.GetEntryGroupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry_group), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup( @@ -1209,7 +1294,7 @@ async def test_get_entry_group_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.GetEntryGroupRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.EntryGroup) @@ -1221,6 +1306,11 @@ async def test_get_entry_group_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_get_entry_group_async_from_dict(): + await test_get_entry_group_async(request_type=dict) + + def test_get_entry_group_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1230,7 +1320,7 @@ def test_get_entry_group_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: call.return_value = datacatalog.EntryGroup() client.get_entry_group(request) @@ -1255,9 +1345,7 @@ async def test_get_entry_group_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry_group), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.EntryGroup() ) @@ -1278,7 +1366,7 @@ def test_get_entry_group_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -1316,9 +1404,7 @@ async def test_get_entry_group_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry_group), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.EntryGroup() @@ -1368,7 +1454,7 @@ def test_delete_entry_group( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1390,18 +1476,20 @@ def test_delete_entry_group_from_dict(): @pytest.mark.asyncio -async def test_delete_entry_group_async(transport: str = "grpc_asyncio"): +async def test_delete_entry_group_async( + transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryGroupRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.DeleteEntryGroupRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1412,12 +1500,17 @@ async def test_delete_entry_group_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteEntryGroupRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_entry_group_async_from_dict(): + await test_delete_entry_group_async(request_type=dict) + + def test_delete_entry_group_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1428,7 +1521,7 @@ def test_delete_entry_group_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: call.return_value = None @@ -1455,7 +1548,7 @@ async def test_delete_entry_group_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1476,7 +1569,7 @@ def test_delete_entry_group_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -1510,7 +1603,7 @@ async def test_delete_entry_group_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_entry_group), "__call__" + type(client.transport.delete_entry_group), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1553,7 +1646,7 @@ def test_list_entry_groups( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntryGroupsResponse( @@ -1569,6 +1662,7 @@ def test_list_entry_groups( assert args[0] == datacatalog.ListEntryGroupsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntryGroupsPager) assert response.next_page_token == "next_page_token_value" @@ -1579,18 +1673,20 @@ def test_list_entry_groups_from_dict(): @pytest.mark.asyncio -async def test_list_entry_groups_async(transport: str = "grpc_asyncio"): +async def test_list_entry_groups_async( + transport: str = "grpc_asyncio", request_type=datacatalog.ListEntryGroupsRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.ListEntryGroupsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1605,7 +1701,7 @@ async def test_list_entry_groups_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.ListEntryGroupsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListEntryGroupsAsyncPager) @@ -1613,6 +1709,11 @@ async def test_list_entry_groups_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_entry_groups_async_from_dict(): + await test_list_entry_groups_async(request_type=dict) + + def test_list_entry_groups_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1623,7 +1724,7 @@ def test_list_entry_groups_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: call.return_value = datacatalog.ListEntryGroupsResponse() @@ -1650,7 +1751,7 @@ async def test_list_entry_groups_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntryGroupsResponse() @@ -1673,7 +1774,7 @@ def test_list_entry_groups_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.ListEntryGroupsResponse() @@ -1707,7 +1808,7 @@ async def test_list_entry_groups_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntryGroupsResponse() @@ -1744,7 +1845,7 @@ def test_list_entry_groups_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1786,7 +1887,7 @@ def test_list_entry_groups_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_entry_groups), "__call__" + type(client.transport.list_entry_groups), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1820,7 +1921,7 @@ async def test_list_entry_groups_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entry_groups), + type(client.transport.list_entry_groups), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1861,7 +1962,7 @@ async def test_list_entry_groups_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entry_groups), + type(client.transport.list_entry_groups), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1905,14 +2006,14 @@ def test_create_entry( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.create_entry), "__call__") as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", - type=datacatalog.EntryType.TABLE, + type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] @@ -1928,6 +2029,7 @@ def test_create_entry( assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) assert response.name == "name_value" @@ -1944,19 +2046,19 @@ def test_create_entry_from_dict(): @pytest.mark.asyncio -async def test_create_entry_async(transport: str = "grpc_asyncio"): +async def test_create_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.Entry( @@ -1973,7 +2075,7 @@ async def test_create_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) @@ -1987,6 +2089,11 @@ async def test_create_entry_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_create_entry_async_from_dict(): + await test_create_entry_async(request_type=dict) + + def test_create_entry_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -1996,7 +2103,7 @@ def test_create_entry_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_entry), "__call__") as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: call.return_value = datacatalog.Entry() client.create_entry(request) @@ -2021,9 +2128,7 @@ async def test_create_entry_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) await client.create_entry(request) @@ -2042,7 +2147,7 @@ def test_create_entry_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.create_entry), "__call__") as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2085,9 +2190,7 @@ async def test_create_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2139,14 +2242,14 @@ def test_update_entry( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_entry), "__call__") as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", - type=datacatalog.EntryType.TABLE, + type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] @@ -2162,6 +2265,7 @@ def test_update_entry( assert args[0] == datacatalog.UpdateEntryRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, datacatalog.Entry) assert response.name == "name_value" @@ -2178,19 +2282,19 @@ def test_update_entry_from_dict(): @pytest.mark.asyncio -async def test_update_entry_async(transport: str = "grpc_asyncio"): +async def test_update_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.UpdateEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.Entry( @@ -2207,7 +2311,7 @@ async def test_update_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) @@ -2221,6 +2325,11 @@ async def test_update_entry_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_update_entry_async_from_dict(): + await test_update_entry_async(request_type=dict) + + def test_update_entry_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -2230,7 +2339,7 @@ def test_update_entry_field_headers(): request.entry.name = "entry.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.update_entry), "__call__") as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = datacatalog.Entry() client.update_entry(request) @@ -2255,9 +2364,7 @@ async def test_update_entry_field_headers_async(): request.entry.name = "entry.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) await client.update_entry(request) @@ -2276,7 +2383,7 @@ def test_update_entry_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_entry), "__call__") as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2315,9 +2422,7 @@ async def test_update_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2365,7 +2470,7 @@ def test_delete_entry( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.delete_entry), "__call__") as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2386,19 +2491,19 @@ def test_delete_entry_from_dict(): @pytest.mark.asyncio -async def test_delete_entry_async(transport: str = "grpc_asyncio"): +async def test_delete_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.DeleteEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2408,12 +2513,17 @@ async def test_delete_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteEntryRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_entry_async_from_dict(): + await test_delete_entry_async(request_type=dict) + + def test_delete_entry_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -2423,7 +2533,7 @@ def test_delete_entry_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.delete_entry), "__call__") as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: call.return_value = None client.delete_entry(request) @@ -2448,9 +2558,7 @@ async def test_delete_entry_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_entry(request) @@ -2469,7 +2577,7 @@ def test_delete_entry_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_entry), "__call__") as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2501,9 +2609,7 @@ async def test_delete_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2542,14 +2648,14 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", - type=datacatalog.EntryType.TABLE, + type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] @@ -2565,6 +2671,7 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) assert response.name == "name_value" @@ -2581,19 +2688,19 @@ def test_get_entry_from_dict(): @pytest.mark.asyncio -async def test_get_entry_async(transport: str = "grpc_asyncio"): +async def test_get_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.GetEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.Entry( @@ -2610,7 +2717,7 @@ async def test_get_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.GetEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) @@ -2624,6 +2731,11 @@ async def test_get_entry_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_get_entry_async_from_dict(): + await test_get_entry_async(request_type=dict) + + def test_get_entry_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -2633,7 +2745,7 @@ def test_get_entry_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_entry), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: call.return_value = datacatalog.Entry() client.get_entry(request) @@ -2658,9 +2770,7 @@ async def test_get_entry_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) await client.get_entry(request) @@ -2679,7 +2789,7 @@ def test_get_entry_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2711,9 +2821,7 @@ async def test_get_entry_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry() @@ -2754,14 +2862,14 @@ def test_lookup_entry( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.lookup_entry), "__call__") as call: + with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.Entry( name="name_value", linked_resource="linked_resource_value", display_name="display_name_value", description="description_value", - type=datacatalog.EntryType.TABLE, + type_=datacatalog.EntryType.TABLE, integrated_system=common.IntegratedSystem.BIGQUERY, gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec( file_patterns=["file_patterns_value"] @@ -2777,6 +2885,7 @@ def test_lookup_entry( assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, datacatalog.Entry) assert response.name == "name_value" @@ -2793,19 +2902,19 @@ def test_lookup_entry_from_dict(): @pytest.mark.asyncio -async def test_lookup_entry_async(transport: str = "grpc_asyncio"): +async def test_lookup_entry_async( + transport: str = "grpc_asyncio", request_type=datacatalog.LookupEntryRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.LookupEntryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.lookup_entry), "__call__" - ) as call: + with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.Entry( @@ -2822,7 +2931,7 @@ async def test_lookup_entry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.LookupEntryRequest() # Establish that the response is the type that we expect. assert isinstance(response, datacatalog.Entry) @@ -2836,6 +2945,11 @@ async def test_lookup_entry_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_lookup_entry_async_from_dict(): + await test_lookup_entry_async(request_type=dict) + + def test_list_entries( transport: str = "grpc", request_type=datacatalog.ListEntriesRequest ): @@ -2848,7 +2962,7 @@ def test_list_entries( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse( next_page_token="next_page_token_value", @@ -2863,6 +2977,7 @@ def test_list_entries( assert args[0] == datacatalog.ListEntriesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesPager) assert response.next_page_token == "next_page_token_value" @@ -2873,19 +2988,19 @@ def test_list_entries_from_dict(): @pytest.mark.asyncio -async def test_list_entries_async(transport: str = "grpc_asyncio"): +async def test_list_entries_async( + transport: str = "grpc_asyncio", request_type=datacatalog.ListEntriesRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.ListEntriesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_entries), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse(next_page_token="next_page_token_value",) @@ -2897,7 +3012,7 @@ async def test_list_entries_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.ListEntriesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListEntriesAsyncPager) @@ -2905,6 +3020,11 @@ async def test_list_entries_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_entries_async_from_dict(): + await test_list_entries_async(request_type=dict) + + def test_list_entries_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -2914,7 +3034,7 @@ def test_list_entries_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: call.return_value = datacatalog.ListEntriesResponse() client.list_entries(request) @@ -2939,9 +3059,7 @@ async def test_list_entries_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_entries), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListEntriesResponse() ) @@ -2962,7 +3080,7 @@ def test_list_entries_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse() @@ -2994,9 +3112,7 @@ async def test_list_entries_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.list_entries), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListEntriesResponse() @@ -3031,7 +3147,7 @@ def test_list_entries_pager(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListEntriesResponse( @@ -3069,7 +3185,7 @@ def test_list_entries_pages(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_entries), "__call__") as call: + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListEntriesResponse( @@ -3100,9 +3216,7 @@ async def test_list_entries_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entries), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -3139,9 +3253,7 @@ async def test_list_entries_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_entries), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -3182,7 +3294,7 @@ def test_create_tag_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( @@ -3198,6 +3310,7 @@ def test_create_tag_template( assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" @@ -3210,18 +3323,20 @@ def test_create_tag_template_from_dict(): @pytest.mark.asyncio -async def test_create_tag_template_async(transport: str = "grpc_asyncio"): +async def test_create_tag_template_async( + transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagTemplateRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateTagTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3234,7 +3349,7 @@ async def test_create_tag_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateTagTemplateRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.TagTemplate) @@ -3244,8 +3359,13 @@ async def test_create_tag_template_async(transport: str = "grpc_asyncio"): assert response.display_name == "display_name_value" -def test_create_tag_template_field_headers(): - client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) +@pytest.mark.asyncio +async def test_create_tag_template_async_from_dict(): + await test_create_tag_template_async(request_type=dict) + + +def test_create_tag_template_field_headers(): + client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3254,7 +3374,7 @@ def test_create_tag_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() @@ -3281,7 +3401,7 @@ async def test_create_tag_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) @@ -3302,7 +3422,7 @@ def test_create_tag_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3347,7 +3467,7 @@ async def test_create_tag_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.create_tag_template), "__call__" + type(client.transport.create_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3400,9 +3520,7 @@ def test_get_tag_template( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( name="name_value", display_name="display_name_value", @@ -3417,6 +3535,7 @@ def test_get_tag_template( assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" @@ -3429,19 +3548,19 @@ def test_get_tag_template_from_dict(): @pytest.mark.asyncio -async def test_get_tag_template_async(transport: str = "grpc_asyncio"): +async def test_get_tag_template_async( + transport: str = "grpc_asyncio", request_type=datacatalog.GetTagTemplateRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.GetTagTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplate(name="name_value", display_name="display_name_value",) @@ -3453,7 +3572,7 @@ async def test_get_tag_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.GetTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) @@ -3463,6 +3582,11 @@ async def test_get_tag_template_async(transport: str = "grpc_asyncio"): assert response.display_name == "display_name_value" +@pytest.mark.asyncio +async def test_get_tag_template_async_from_dict(): + await test_get_tag_template_async(request_type=dict) + + def test_get_tag_template_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -3472,9 +3596,7 @@ def test_get_tag_template_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = tags.TagTemplate() client.get_tag_template(request) @@ -3499,9 +3621,7 @@ async def test_get_tag_template_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) await client.get_tag_template(request) @@ -3520,9 +3640,7 @@ def test_get_tag_template_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3554,9 +3672,7 @@ async def test_get_tag_template_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_tag_template), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3598,7 +3714,7 @@ def test_update_tag_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate( @@ -3614,6 +3730,7 @@ def test_update_tag_template( assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) assert response.name == "name_value" @@ -3626,18 +3743,20 @@ def test_update_tag_template_from_dict(): @pytest.mark.asyncio -async def test_update_tag_template_async(transport: str = "grpc_asyncio"): +async def test_update_tag_template_async( + transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagTemplateRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datacatalog.UpdateTagTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3650,7 +3769,7 @@ async def test_update_tag_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateTagTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplate) @@ -3660,6 +3779,11 @@ async def test_update_tag_template_async(transport: str = "grpc_asyncio"): assert response.display_name == "display_name_value" +@pytest.mark.asyncio +async def test_update_tag_template_async_from_dict(): + await test_update_tag_template_async(request_type=dict) + + def test_update_tag_template_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -3670,7 +3794,7 @@ def test_update_tag_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = tags.TagTemplate() @@ -3700,7 +3824,7 @@ async def test_update_tag_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) @@ -3724,7 +3848,7 @@ def test_update_tag_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3765,7 +3889,7 @@ async def test_update_tag_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_tag_template), "__call__" + type(client.transport.update_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplate() @@ -3815,7 +3939,7 @@ def test_delete_tag_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3837,18 +3961,20 @@ def test_delete_tag_template_from_dict(): @pytest.mark.asyncio -async def test_delete_tag_template_async(transport: str = "grpc_asyncio"): +async def test_delete_tag_template_async( + transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagTemplateRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datacatalog.DeleteTagTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -3859,12 +3985,17 @@ async def test_delete_tag_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteTagTemplateRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_tag_template_async_from_dict(): + await test_delete_tag_template_async(request_type=dict) + + def test_delete_tag_template_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -3875,7 +4006,7 @@ def test_delete_tag_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = None @@ -3902,7 +4033,7 @@ async def test_delete_tag_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -3923,7 +4054,7 @@ def test_delete_tag_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3961,7 +4092,7 @@ async def test_delete_tag_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template), "__call__" + type(client.transport.delete_tag_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4006,7 +4137,7 @@ def test_create_tag_template_field( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( @@ -4025,6 +4156,7 @@ def test_create_tag_template_field( assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" @@ -4041,18 +4173,21 @@ def test_create_tag_template_field_from_dict(): @pytest.mark.asyncio -async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"): +async def test_create_tag_template_field_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.CreateTagTemplateFieldRequest, +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateTagTemplateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4070,7 +4205,7 @@ async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) @@ -4084,6 +4219,11 @@ async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"): assert response.order == 540 +@pytest.mark.asyncio +async def test_create_tag_template_field_async_from_dict(): + await test_create_tag_template_field_async(request_type=dict) + + def test_create_tag_template_field_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4094,7 +4234,7 @@ def test_create_tag_template_field_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() @@ -4121,7 +4261,7 @@ async def test_create_tag_template_field_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() @@ -4144,7 +4284,7 @@ def test_create_tag_template_field_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4189,7 +4329,7 @@ async def test_create_tag_template_field_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_tag_template_field), "__call__" + type(client.transport.create_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4245,7 +4385,7 @@ def test_update_tag_template_field( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( @@ -4264,6 +4404,7 @@ def test_update_tag_template_field( assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" @@ -4280,18 +4421,21 @@ def test_update_tag_template_field_from_dict(): @pytest.mark.asyncio -async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"): +async def test_update_tag_template_field_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.UpdateTagTemplateFieldRequest, +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = datacatalog.UpdateTagTemplateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4309,7 +4453,7 @@ async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.TagTemplateField) @@ -4323,6 +4467,11 @@ async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"): assert response.order == 540 +@pytest.mark.asyncio +async def test_update_tag_template_field_async_from_dict(): + await test_update_tag_template_field_async(request_type=dict) + + def test_update_tag_template_field_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4333,7 +4482,7 @@ def test_update_tag_template_field_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() @@ -4360,7 +4509,7 @@ async def test_update_tag_template_field_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() @@ -4383,7 +4532,7 @@ def test_update_tag_template_field_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4428,7 +4577,7 @@ async def test_update_tag_template_field_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_tag_template_field), "__call__" + type(client.transport.update_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4484,7 +4633,7 @@ def test_rename_tag_template_field( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField( @@ -4503,6 +4652,7 @@ def test_rename_tag_template_field( assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, tags.TagTemplateField) assert response.name == "name_value" @@ -4519,18 +4669,21 @@ def test_rename_tag_template_field_from_dict(): @pytest.mark.asyncio -async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"): +async def test_rename_tag_template_field_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.RenameTagTemplateFieldRequest, +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.RenameTagTemplateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4548,7 +4701,7 @@ async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.RenameTagTemplateFieldRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.TagTemplateField) @@ -4562,6 +4715,11 @@ async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"): assert response.order == 540 +@pytest.mark.asyncio +async def test_rename_tag_template_field_async_from_dict(): + await test_rename_tag_template_field_async(request_type=dict) + + def test_rename_tag_template_field_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4572,7 +4730,7 @@ def test_rename_tag_template_field_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: call.return_value = tags.TagTemplateField() @@ -4599,7 +4757,7 @@ async def test_rename_tag_template_field_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.TagTemplateField() @@ -4622,7 +4780,7 @@ def test_rename_tag_template_field_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = tags.TagTemplateField() @@ -4663,7 +4821,7 @@ async def test_rename_tag_template_field_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.rename_tag_template_field), "__call__" + type(client.transport.rename_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = tags.TagTemplateField() @@ -4715,7 +4873,7 @@ def test_delete_tag_template_field( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4737,18 +4895,21 @@ def test_delete_tag_template_field_from_dict(): @pytest.mark.asyncio -async def test_delete_tag_template_field_async(transport: str = "grpc_asyncio"): +async def test_delete_tag_template_field_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.DeleteTagTemplateFieldRequest, +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.DeleteTagTemplateFieldRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -4759,12 +4920,17 @@ async def test_delete_tag_template_field_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() # Establish that the response is the type that we expect. 
assert response is None +@pytest.mark.asyncio +async def test_delete_tag_template_field_async_from_dict(): + await test_delete_tag_template_field_async(request_type=dict) + + def test_delete_tag_template_field_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4775,7 +4941,7 @@ def test_delete_tag_template_field_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = None @@ -4802,7 +4968,7 @@ async def test_delete_tag_template_field_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -4823,7 +4989,7 @@ def test_delete_tag_template_field_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4861,7 +5027,7 @@ async def test_delete_tag_template_field_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_tag_template_field), "__call__" + type(client.transport.delete_tag_template_field), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4905,7 +5071,7 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag( name="name_value", @@ -4923,6 +5089,7 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) assert response.name == "name_value" @@ -4937,19 +5104,19 @@ def test_create_tag_from_dict(): @pytest.mark.asyncio -async def test_create_tag_async(transport: str = "grpc_asyncio"): +async def test_create_tag_async( + transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.CreateTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.Tag( @@ -4965,7 +5132,7 @@ async def test_create_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.CreateTagRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, tags.Tag) @@ -4977,6 +5144,11 @@ async def test_create_tag_async(transport: str = "grpc_asyncio"): assert response.template_display_name == "template_display_name_value" +@pytest.mark.asyncio +async def test_create_tag_async_from_dict(): + await test_create_tag_async(request_type=dict) + + def test_create_tag_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -4986,7 +5158,7 @@ def test_create_tag_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = tags.Tag() client.create_tag(request) @@ -5011,9 +5183,7 @@ async def test_create_tag_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) await client.create_tag(request) @@ -5032,7 +5202,7 @@ def test_create_tag_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_tag), "__call__") as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() @@ -5070,9 +5240,7 @@ async def test_create_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.create_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() @@ -5117,7 +5285,7 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag( name="name_value", @@ -5135,6 +5303,7 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) assert response.name == "name_value" @@ -5149,19 +5318,19 @@ def test_update_tag_from_dict(): @pytest.mark.asyncio -async def test_update_tag_async(transport: str = "grpc_asyncio"): +async def test_update_tag_async( + transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.UpdateTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( tags.Tag( @@ -5177,7 +5346,7 @@ async def test_update_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.UpdateTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, tags.Tag) @@ -5189,6 +5358,11 @@ async def test_update_tag_async(transport: str = "grpc_asyncio"): assert response.template_display_name == "template_display_name_value" +@pytest.mark.asyncio +async def test_update_tag_async_from_dict(): + await test_update_tag_async(request_type=dict) + + def test_update_tag_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -5198,7 +5372,7 @@ def test_update_tag_field_headers(): request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = tags.Tag() client.update_tag(request) @@ -5223,9 +5397,7 @@ async def test_update_tag_field_headers_async(): request.tag.name = "tag.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) await client.update_tag(request) @@ -5244,7 +5416,7 @@ def test_update_tag_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.update_tag), "__call__") as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() @@ -5283,9 +5455,7 @@ async def test_update_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = tags.Tag() @@ -5331,7 +5501,7 @@ def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagR request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5352,19 +5522,19 @@ def test_delete_tag_from_dict(): @pytest.mark.asyncio -async def test_delete_tag_async(transport: str = "grpc_asyncio"): +async def test_delete_tag_async( + transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.DeleteTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.delete_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -5374,12 +5544,17 @@ async def test_delete_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.DeleteTagRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_tag_async_from_dict(): + await test_delete_tag_async(request_type=dict) + + def test_delete_tag_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -5389,7 +5564,7 @@ def test_delete_tag_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = None client.delete_tag(request) @@ -5414,9 +5589,7 @@ async def test_delete_tag_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_tag(request) @@ -5435,7 +5608,7 @@ def test_delete_tag_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.delete_tag), "__call__") as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5467,9 +5640,7 @@ async def test_delete_tag_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5508,7 +5679,7 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse( next_page_token="next_page_token_value", @@ -5523,6 +5694,7 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTagsPager) assert response.next_page_token == "next_page_token_value" @@ -5533,19 +5705,19 @@ def test_list_tags_from_dict(): @pytest.mark.asyncio -async def test_list_tags_async(transport: str = "grpc_asyncio"): +async def test_list_tags_async( + transport: str = "grpc_asyncio", request_type=datacatalog.ListTagsRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = datacatalog.ListTagsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse(next_page_token="next_page_token_value",) @@ -5557,7 +5729,7 @@ async def test_list_tags_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == datacatalog.ListTagsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTagsAsyncPager) @@ -5565,6 +5737,11 @@ async def test_list_tags_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_tags_async_from_dict(): + await test_list_tags_async(request_type=dict) + + def test_list_tags_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -5574,7 +5751,7 @@ def test_list_tags_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value = datacatalog.ListTagsResponse() client.list_tags(request) @@ -5599,9 +5776,7 @@ async def test_list_tags_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.ListTagsResponse() ) @@ -5622,7 +5797,7 @@ def test_list_tags_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse() @@ -5654,9 +5829,7 @@ async def test_list_tags_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datacatalog.ListTagsResponse() @@ -5691,7 +5864,7 @@ def test_list_tags_pager(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListTagsResponse( @@ -5720,7 +5893,7 @@ def test_list_tags_pages(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_tags), "__call__") as call: + with mock.patch.object(type(client.transport.list_tags), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( datacatalog.ListTagsResponse( @@ -5742,9 +5915,7 @@ async def test_list_tags_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_tags), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -5772,9 +5943,7 @@ async def test_list_tags_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_tags), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -5805,7 +5974,7 @@ def test_set_iam_policy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -5818,6 +5987,7 @@ def test_set_iam_policy( assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) assert response.version == 774 @@ -5830,19 +6000,19 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -5854,7 +6024,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, policy.Policy) @@ -5864,6 +6034,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + def test_set_iam_policy_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -5873,7 +6048,7 @@ def test_set_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.set_iam_policy(request) @@ -5898,9 +6073,7 @@ async def test_set_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.set_iam_policy(request) @@ -5915,10 +6088,10 @@ async def test_set_iam_policy_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_set_iam_policy_from_dict(): +def test_set_iam_policy_from_dict_foreign(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy() @@ -5935,7 +6108,7 @@ def test_set_iam_policy_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -5967,9 +6140,7 @@ async def test_set_iam_policy_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -6010,7 +6181,7 @@ def test_get_iam_policy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -6023,6 +6194,7 @@ def test_get_iam_policy( assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) assert response.version == 774 @@ -6035,19 +6207,19 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -6059,7 +6231,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policy.Policy) @@ -6069,6 +6241,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + def test_get_iam_policy_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -6078,7 +6255,7 @@ def test_get_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.get_iam_policy(request) @@ -6103,9 +6280,7 @@ async def test_get_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.get_iam_policy(request) @@ -6120,10 +6295,10 @@ async def test_get_iam_policy_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_get_iam_policy_from_dict(): +def test_get_iam_policy_from_dict_foreign(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -6140,7 +6315,7 @@ def test_get_iam_policy_flattened(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy() @@ -6172,9 +6347,7 @@ async def test_get_iam_policy_flattened_async(): client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -6216,7 +6389,7 @@ def test_test_iam_permissions( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse( @@ -6232,6 +6405,7 @@ def test_test_iam_permissions( assert args[0] == iam_policy.TestIamPermissionsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -6242,18 +6416,20 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest +): client = DataCatalogAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -6266,7 +6442,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.TestIamPermissionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, iam_policy.TestIamPermissionsResponse) @@ -6274,6 +6450,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert response.permissions == ["permissions_value"] +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + def test_test_iam_permissions_field_headers(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) @@ -6284,7 +6465,7 @@ def test_test_iam_permissions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = iam_policy.TestIamPermissionsResponse() @@ -6311,7 +6492,7 @@ async def test_test_iam_permissions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy.TestIamPermissionsResponse() @@ -6329,11 +6510,11 @@ async def test_test_iam_permissions_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_test_iam_permissions_from_dict(): +def test_test_iam_permissions_from_dict_foreign(): client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse() @@ -6383,7 +6564,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = DataCatalogClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -6401,10 +6582,22 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.DataCatalogGrpcTransport,) + assert isinstance(client.transport, transports.DataCatalogGrpcTransport,) def test_data_catalog_base_transport_error(): @@ -6481,6 +6674,17 @@ def test_data_catalog_base_transport_with_credentials_file(): ) +def test_data_catalog_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DataCatalogTransport() + adc.assert_called_once() + + def test_data_catalog_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, "default") as adc: @@ -6513,7 +6717,7 @@ def test_data_catalog_host_no_port(): api_endpoint="datacatalog.googleapis.com" ), ) - assert client._transport._host == "datacatalog.googleapis.com:443" + assert client.transport._host == "datacatalog.googleapis.com:443" def test_data_catalog_host_with_port(): @@ -6523,193 +6727,171 @@ def test_data_catalog_host_with_port(): api_endpoint="datacatalog.googleapis.com:8000" ), ) - assert client._transport._host == "datacatalog.googleapis.com:8000" + assert client.transport._host == "datacatalog.googleapis.com:8000" def test_data_catalog_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. 
transport = transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None def test_data_catalog_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.DataCatalogGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_data_catalog_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.DataCatalogGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_data_catalog_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel +def test_data_catalog_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials 
== mock_ssl_cred @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. +def test_data_catalog_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - mock_cred = mock.Mock() - transport = transports.DataCatalogGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + 
"mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel -def test_tag_path(): +def test_entry_path(): project = "squid" location = "clam" entry_group = "whelk" entry = "octopus" - tag = "oyster" + + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format( + project=project, location=location, entry_group=entry_group, entry=entry, + ) + actual = DataCatalogClient.entry_path(project, location, entry_group, entry) + assert expected == actual + + +def test_parse_entry_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "entry_group": "cuttlefish", + "entry": "mussel", + } + path = DataCatalogClient.entry_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_entry_path(path) + assert expected == actual + + +def test_entry_group_path(): + project = "winkle" + location = "nautilus" + entry_group = "scallop" + + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( + project=project, location=location, entry_group=entry_group, + ) + actual = DataCatalogClient.entry_group_path(project, location, entry_group) + assert expected == actual + + +def test_parse_entry_group_path(): + expected = { + "project": "abalone", + "location": "squid", + "entry_group": "clam", + } + path = DataCatalogClient.entry_group_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_entry_group_path(path) + assert expected == actual + + +def test_tag_path(): + project = "whelk" + location = "octopus" + entry_group = "oyster" + entry = "nudibranch" + tag = "cuttlefish" expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format( project=project, @@ -6724,11 +6906,11 @@ def test_tag_path(): def test_parse_tag_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "entry_group": "mussel", - "entry": "winkle", - "tag": "nautilus", + "project": "mussel", + "location": "winkle", + "entry_group": "nautilus", + "entry": "scallop", + "tag": "abalone", } path = DataCatalogClient.tag_path(**expected) @@ -6737,38 +6919,36 @@ def test_parse_tag_path(): assert expected == actual -def test_entry_path(): +def test_tag_template_path(): project = "squid" location = "clam" - entry_group = "whelk" - entry = "octopus" + tag_template = "whelk" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format( - project=project, location=location, entry_group=entry_group, entry=entry, + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( + project=project, location=location, tag_template=tag_template, ) - actual = DataCatalogClient.entry_path(project, location, entry_group, entry) + actual = DataCatalogClient.tag_template_path(project, location, tag_template) assert expected == actual -def test_parse_entry_path(): +def test_parse_tag_template_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "entry_group": "cuttlefish", - "entry": "mussel", + "project": "octopus", + "location": "oyster", + "tag_template": "nudibranch", } - path = DataCatalogClient.entry_path(**expected) + path = DataCatalogClient.tag_template_path(**expected) # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_entry_path(path) + actual = DataCatalogClient.parse_tag_template_path(path) assert expected == actual def test_tag_template_field_path(): - project = "squid" - location = "clam" - tag_template = "whelk" - field = "octopus" + project = "cuttlefish" + location = "mussel" + tag_template = "winkle" + field = "nautilus" expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format( project=project, location=location, tag_template=tag_template, field=field, @@ -6781,10 +6961,10 @@ def test_tag_template_field_path(): def test_parse_tag_template_field_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "tag_template": "cuttlefish", - "field": "mussel", + "project": "scallop", + "location": "abalone", + "tag_template": "squid", + "field": "clam", } path = DataCatalogClient.tag_template_field_path(**expected) @@ -6793,53 +6973,104 @@ def test_parse_tag_template_field_path(): assert expected == actual -def test_entry_group_path(): - project = "squid" - location = "clam" - entry_group = "whelk" +def test_common_billing_account_path(): + billing_account = "whelk" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format( - project=project, location=location, entry_group=entry_group, + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - actual = DataCatalogClient.entry_group_path(project, location, entry_group) + actual = DataCatalogClient.common_billing_account_path(billing_account) assert expected == actual -def test_parse_entry_group_path(): +def test_parse_common_billing_account_path(): expected = { - "project": "octopus", - "location": "oyster", - "entry_group": "nudibranch", + "billing_account": "octopus", } - path = DataCatalogClient.entry_group_path(**expected) + path = DataCatalogClient.common_billing_account_path(**expected) # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_entry_group_path(path) + actual = DataCatalogClient.parse_common_billing_account_path(path) assert expected == actual -def test_tag_template_path(): - project = "squid" - location = "clam" - tag_template = "whelk" +def test_common_folder_path(): + folder = "oyster" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format( - project=project, location=location, tag_template=tag_template, + expected = "folders/{folder}".format(folder=folder,) + actual = DataCatalogClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = DataCatalogClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + + expected = "organizations/{organization}".format(organization=organization,) + actual = DataCatalogClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = DataCatalogClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + + expected = "projects/{project}".format(project=project,) + actual = DataCatalogClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = DataCatalogClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, ) - actual = DataCatalogClient.tag_template_path(project, location, tag_template) + actual = DataCatalogClient.common_location_path(project, location) assert expected == actual -def test_parse_tag_template_path(): +def test_parse_common_location_path(): expected = { - "project": "octopus", - "location": "oyster", - "tag_template": "nudibranch", + "project": "squid", + "location": "clam", } - path = DataCatalogClient.tag_template_path(**expected) + path = DataCatalogClient.common_location_path(**expected) # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_template_path(path) + actual = DataCatalogClient.parse_common_location_path(path) assert expected == actual diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index 51de69f0..fc201fe0 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -104,12 +104,12 @@ def test_policy_tag_manager_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "datacatalog.googleapis.com:443" + assert client.transport._host == "datacatalog.googleapis.com:443" def test_policy_tag_manager_client_get_transport_class(): @@ -165,15 +165,14 @@ def 
test_policy_tag_manager_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -182,15 +181,14 @@ def test_policy_tag_manager_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -199,95 +197,185 @@ def test_policy_tag_manager_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PolicyTagManagerClient, + transports.PolicyTagManagerGrpcTransport, + "grpc", + "true", + ), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PolicyTagManagerClient, + transports.PolicyTagManagerGrpcTransport, + "grpc", + "false", + ), + ( + PolicyTagManagerAsyncClient, + transports.PolicyTagManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + PolicyTagManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerClient), +) +@mock.patch.object( + PolicyTagManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def 
test_policy_tag_manager_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds ): patched.return_value = None - client = client_class() + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + 
ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -314,8 +402,7 @@ def test_policy_tag_manager_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -345,8 +432,7 @@ def test_policy_tag_manager_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -365,8 +451,7 
@@ def test_policy_tag_manager_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -384,7 +469,7 @@ def test_create_taxonomy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy( name="name_value", @@ -404,6 +489,7 @@ def test_create_taxonomy( assert args[0] == policytagmanager.CreateTaxonomyRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) assert response.name == "name_value" @@ -422,19 +508,19 @@ def test_create_taxonomy_from_dict(): @pytest.mark.asyncio -async def test_create_taxonomy_async(transport: str = "grpc_asyncio"): +async def test_create_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.CreateTaxonomyRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.CreateTaxonomyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy( @@ -453,7 +539,7 @@ async def test_create_taxonomy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.CreateTaxonomyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.Taxonomy) @@ -469,6 +555,11 @@ async def test_create_taxonomy_async(transport: str = "grpc_asyncio"): ] +@pytest.mark.asyncio +async def test_create_taxonomy_async_from_dict(): + await test_create_taxonomy_async(request_type=dict) + + def test_create_taxonomy_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -478,7 +569,7 @@ def test_create_taxonomy_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: call.return_value = policytagmanager.Taxonomy() client.create_taxonomy(request) @@ -505,9 +596,7 @@ async def test_create_taxonomy_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy() ) @@ -528,7 +617,7 @@ def test_create_taxonomy_flattened(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.create_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() @@ -569,9 +658,7 @@ async def test_create_taxonomy_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() @@ -623,7 +710,7 @@ def test_delete_taxonomy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -644,19 +731,19 @@ def test_delete_taxonomy_from_dict(): @pytest.mark.asyncio -async def test_delete_taxonomy_async(transport: str = "grpc_asyncio"): +async def test_delete_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.DeleteTaxonomyRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.DeleteTaxonomyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.delete_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -666,12 +753,17 @@ async def test_delete_taxonomy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.DeleteTaxonomyRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_taxonomy_async_from_dict(): + await test_delete_taxonomy_async(request_type=dict) + + def test_delete_taxonomy_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -681,7 +773,7 @@ def test_delete_taxonomy_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: call.return_value = None client.delete_taxonomy(request) @@ -708,9 +800,7 @@ async def test_delete_taxonomy_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_taxonomy(request) @@ -729,7 +819,7 @@ def test_delete_taxonomy_flattened(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.delete_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -763,9 +853,7 @@ async def test_delete_taxonomy_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -808,7 +896,7 @@ def test_update_taxonomy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy( name="name_value", @@ -828,6 +916,7 @@ def test_update_taxonomy( assert args[0] == policytagmanager.UpdateTaxonomyRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) assert response.name == "name_value" @@ -846,19 +935,19 @@ def test_update_taxonomy_from_dict(): @pytest.mark.asyncio -async def test_update_taxonomy_async(transport: str = "grpc_asyncio"): +async def test_update_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.UpdateTaxonomyRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = policytagmanager.UpdateTaxonomyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy( @@ -877,7 +966,7 @@ async def test_update_taxonomy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.UpdateTaxonomyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.Taxonomy) @@ -893,6 +982,11 @@ async def test_update_taxonomy_async(transport: str = "grpc_asyncio"): ] +@pytest.mark.asyncio +async def test_update_taxonomy_async_from_dict(): + await test_update_taxonomy_async(request_type=dict) + + def test_update_taxonomy_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -902,7 +996,7 @@ def test_update_taxonomy_field_headers(): request.taxonomy.name = "taxonomy.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: call.return_value = policytagmanager.Taxonomy() client.update_taxonomy(request) @@ -931,9 +1025,7 @@ async def test_update_taxonomy_field_headers_async(): request.taxonomy.name = "taxonomy.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.update_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy() ) @@ -956,7 +1048,7 @@ def test_update_taxonomy_flattened(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() @@ -991,9 +1083,7 @@ async def test_update_taxonomy_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() @@ -1041,7 +1131,7 @@ def test_list_taxonomies( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call: + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.ListTaxonomiesResponse( next_page_token="next_page_token_value", @@ -1056,6 +1146,7 @@ def test_list_taxonomies( assert args[0] == policytagmanager.ListTaxonomiesRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTaxonomiesPager) assert response.next_page_token == "next_page_token_value" @@ -1066,19 +1157,19 @@ def test_list_taxonomies_from_dict(): @pytest.mark.asyncio -async def test_list_taxonomies_async(transport: str = "grpc_asyncio"): +async def test_list_taxonomies_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.ListTaxonomiesRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.ListTaxonomiesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_taxonomies), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.ListTaxonomiesResponse( @@ -1092,7 +1183,7 @@ async def test_list_taxonomies_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.ListTaxonomiesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTaxonomiesAsyncPager) @@ -1100,6 +1191,11 @@ async def test_list_taxonomies_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_taxonomies_async_from_dict(): + await test_list_taxonomies_async(request_type=dict) + + def test_list_taxonomies_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -1109,7 +1205,7 @@ def test_list_taxonomies_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call: + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: call.return_value = policytagmanager.ListTaxonomiesResponse() client.list_taxonomies(request) @@ -1136,9 +1232,7 @@ async def test_list_taxonomies_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_taxonomies), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.ListTaxonomiesResponse() ) @@ -1159,7 +1253,7 @@ def test_list_taxonomies_flattened(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call: + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policytagmanager.ListTaxonomiesResponse() @@ -1193,9 +1287,7 @@ async def test_list_taxonomies_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_taxonomies), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.ListTaxonomiesResponse() @@ -1232,7 +1324,7 @@ def test_list_taxonomies_pager(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call: + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( policytagmanager.ListTaxonomiesResponse( @@ -1272,7 +1364,7 @@ def test_list_taxonomies_pages(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call: + with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( policytagmanager.ListTaxonomiesResponse( @@ -1305,9 +1397,7 @@ async def test_list_taxonomies_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_taxonomies), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_taxonomies), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -1346,9 +1436,7 @@ async def test_list_taxonomies_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_taxonomies), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_taxonomies), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1390,7 +1478,7 @@ def test_get_taxonomy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy( name="name_value", @@ -1410,6 +1498,7 @@ def test_get_taxonomy( assert args[0] == policytagmanager.GetTaxonomyRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) assert response.name == "name_value" @@ -1428,19 +1517,19 @@ def test_get_taxonomy_from_dict(): @pytest.mark.asyncio -async def test_get_taxonomy_async(transport: str = "grpc_asyncio"): +async def test_get_taxonomy_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.GetTaxonomyRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.GetTaxonomyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.get_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy( @@ -1459,7 +1548,7 @@ async def test_get_taxonomy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.GetTaxonomyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.Taxonomy) @@ -1475,6 +1564,11 @@ async def test_get_taxonomy_async(transport: str = "grpc_asyncio"): ] +@pytest.mark.asyncio +async def test_get_taxonomy_async_from_dict(): + await test_get_taxonomy_async(request_type=dict) + + def test_get_taxonomy_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -1484,7 +1578,7 @@ def test_get_taxonomy_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: call.return_value = policytagmanager.Taxonomy() client.get_taxonomy(request) @@ -1511,9 +1605,7 @@ async def test_get_taxonomy_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.get_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.Taxonomy() ) @@ -1534,7 +1626,7 @@ def test_get_taxonomy_flattened(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_taxonomy), "__call__") as call: + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() @@ -1568,9 +1660,7 @@ async def test_get_taxonomy_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_taxonomy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.Taxonomy() @@ -1616,7 +1706,7 @@ def test_create_policy_tag( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_policy_tag), "__call__" + type(client.transport.create_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag( @@ -1636,6 +1726,7 @@ def test_create_policy_tag( assert args[0] == policytagmanager.CreatePolicyTagRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.PolicyTag) assert response.name == "name_value" @@ -1654,18 +1745,21 @@ def test_create_policy_tag_from_dict(): @pytest.mark.asyncio -async def test_create_policy_tag_async(transport: str = "grpc_asyncio"): +async def test_create_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.CreatePolicyTagRequest, +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.CreatePolicyTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_policy_tag), "__call__" + type(client.transport.create_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1684,7 +1778,7 @@ async def test_create_policy_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.CreatePolicyTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.PolicyTag) @@ -1700,6 +1794,11 @@ async def test_create_policy_tag_async(transport: str = "grpc_asyncio"): assert response.child_policy_tags == ["child_policy_tags_value"] +@pytest.mark.asyncio +async def test_create_policy_tag_async_from_dict(): + await test_create_policy_tag_async(request_type=dict) + + def test_create_policy_tag_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -1710,7 +1809,7 @@ def test_create_policy_tag_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.create_policy_tag), "__call__" + type(client.transport.create_policy_tag), "__call__" ) as call: call.return_value = policytagmanager.PolicyTag() @@ -1739,7 +1838,7 @@ async def test_create_policy_tag_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_policy_tag), "__call__" + type(client.transport.create_policy_tag), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.PolicyTag() @@ -1762,7 +1861,7 @@ def test_create_policy_tag_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_policy_tag), "__call__" + type(client.transport.create_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() @@ -1805,7 +1904,7 @@ async def test_create_policy_tag_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_policy_tag), "__call__" + type(client.transport.create_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() @@ -1859,7 +1958,7 @@ def test_delete_policy_tag( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_policy_tag), "__call__" + type(client.transport.delete_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -1881,18 +1980,21 @@ def test_delete_policy_tag_from_dict(): @pytest.mark.asyncio -async def test_delete_policy_tag_async(transport: str = "grpc_asyncio"): +async def test_delete_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.DeletePolicyTagRequest, +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.DeletePolicyTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_policy_tag), "__call__" + type(client.transport.delete_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1903,12 +2005,17 @@ async def test_delete_policy_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.DeletePolicyTagRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_policy_tag_async_from_dict(): + await test_delete_policy_tag_async(request_type=dict) + + def test_delete_policy_tag_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -1919,7 +2026,7 @@ def test_delete_policy_tag_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.delete_policy_tag), "__call__" + type(client.transport.delete_policy_tag), "__call__" ) as call: call.return_value = None @@ -1948,7 +2055,7 @@ async def test_delete_policy_tag_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_policy_tag), "__call__" + type(client.transport.delete_policy_tag), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1969,7 +2076,7 @@ def test_delete_policy_tag_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_policy_tag), "__call__" + type(client.transport.delete_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2005,7 +2112,7 @@ async def test_delete_policy_tag_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_policy_tag), "__call__" + type(client.transport.delete_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2050,7 +2157,7 @@ def test_update_policy_tag( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_policy_tag), "__call__" + type(client.transport.update_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag( @@ -2070,6 +2177,7 @@ def test_update_policy_tag( assert args[0] == policytagmanager.UpdatePolicyTagRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.PolicyTag) assert response.name == "name_value" @@ -2088,18 +2196,21 @@ def test_update_policy_tag_from_dict(): @pytest.mark.asyncio -async def test_update_policy_tag_async(transport: str = "grpc_asyncio"): +async def test_update_policy_tag_async( + transport: str = "grpc_asyncio", + request_type=policytagmanager.UpdatePolicyTagRequest, +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.UpdatePolicyTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_policy_tag), "__call__" + type(client.transport.update_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2118,7 +2229,7 @@ async def test_update_policy_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.UpdatePolicyTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.PolicyTag) @@ -2134,6 +2245,11 @@ async def test_update_policy_tag_async(transport: str = "grpc_asyncio"): assert response.child_policy_tags == ["child_policy_tags_value"] +@pytest.mark.asyncio +async def test_update_policy_tag_async_from_dict(): + await test_update_policy_tag_async(request_type=dict) + + def test_update_policy_tag_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -2144,7 +2260,7 @@ def test_update_policy_tag_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.update_policy_tag), "__call__" + type(client.transport.update_policy_tag), "__call__" ) as call: call.return_value = policytagmanager.PolicyTag() @@ -2175,7 +2291,7 @@ async def test_update_policy_tag_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_policy_tag), "__call__" + type(client.transport.update_policy_tag), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.PolicyTag() @@ -2200,7 +2316,7 @@ def test_update_policy_tag_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_policy_tag), "__call__" + type(client.transport.update_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() @@ -2239,7 +2355,7 @@ async def test_update_policy_tag_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_policy_tag), "__call__" + type(client.transport.update_policy_tag), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() @@ -2288,9 +2404,7 @@ def test_list_policy_tags( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_policy_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policytagmanager.ListPolicyTagsResponse( next_page_token="next_page_token_value", @@ -2305,6 +2419,7 @@ def test_list_policy_tags( assert args[0] == policytagmanager.ListPolicyTagsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPolicyTagsPager) assert response.next_page_token == "next_page_token_value" @@ -2315,19 +2430,19 @@ def test_list_policy_tags_from_dict(): @pytest.mark.asyncio -async def test_list_policy_tags_async(transport: str = "grpc_asyncio"): +async def test_list_policy_tags_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.ListPolicyTagsRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.ListPolicyTagsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_policy_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.ListPolicyTagsResponse( @@ -2341,7 +2456,7 @@ async def test_list_policy_tags_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.ListPolicyTagsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPolicyTagsAsyncPager) @@ -2349,6 +2464,11 @@ async def test_list_policy_tags_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_policy_tags_async_from_dict(): + await test_list_policy_tags_async(request_type=dict) + + def test_list_policy_tags_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -2358,9 +2478,7 @@ def test_list_policy_tags_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_policy_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: call.return_value = policytagmanager.ListPolicyTagsResponse() client.list_policy_tags(request) @@ -2387,9 +2505,7 @@ async def test_list_policy_tags_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_policy_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.ListPolicyTagsResponse() ) @@ -2410,9 +2526,7 @@ def test_list_policy_tags_flattened(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_policy_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policytagmanager.ListPolicyTagsResponse() @@ -2446,9 +2560,7 @@ async def test_list_policy_tags_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_policy_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.ListPolicyTagsResponse() @@ -2485,9 +2597,7 @@ def test_list_policy_tags_pager(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_policy_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( policytagmanager.ListPolicyTagsResponse( @@ -2530,9 +2640,7 @@ def test_list_policy_tags_pages(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.list_policy_tags), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( policytagmanager.ListPolicyTagsResponse( @@ -2568,9 +2676,7 @@ async def test_list_policy_tags_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_policy_tags), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_policy_tags), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -2612,9 +2718,7 @@ async def test_list_policy_tags_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_policy_tags), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_policy_tags), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -2659,7 +2763,7 @@ def test_get_policy_tag( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_policy_tag), "__call__") as call: + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag( name="name_value", @@ -2678,6 +2782,7 @@ def test_get_policy_tag( assert args[0] == policytagmanager.GetPolicyTagRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) assert response.name == "name_value" @@ -2696,19 +2801,19 @@ def test_get_policy_tag_from_dict(): @pytest.mark.asyncio -async def test_get_policy_tag_async(transport: str = "grpc_asyncio"): +async def test_get_policy_tag_async( + transport: str = "grpc_asyncio", request_type=policytagmanager.GetPolicyTagRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanager.GetPolicyTagRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.get_policy_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.PolicyTag( @@ -2726,7 +2831,7 @@ async def test_get_policy_tag_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanager.GetPolicyTagRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanager.PolicyTag) @@ -2742,6 +2847,11 @@ async def test_get_policy_tag_async(transport: str = "grpc_asyncio"): assert response.child_policy_tags == ["child_policy_tags_value"] +@pytest.mark.asyncio +async def test_get_policy_tag_async_from_dict(): + await test_get_policy_tag_async(request_type=dict) + + def test_get_policy_tag_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -2751,7 +2861,7 @@ def test_get_policy_tag_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_policy_tag), "__call__") as call: + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: call.return_value = policytagmanager.PolicyTag() client.get_policy_tag(request) @@ -2778,9 +2888,7 @@ async def test_get_policy_tag_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.get_policy_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanager.PolicyTag() ) @@ -2801,7 +2909,7 @@ def test_get_policy_tag_flattened(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_policy_tag), "__call__") as call: + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() @@ -2835,9 +2943,7 @@ async def test_get_policy_tag_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_policy_tag), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policytagmanager.PolicyTag() @@ -2882,7 +2988,7 @@ def test_get_iam_policy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -2895,6 +3001,7 @@ def test_get_iam_policy( assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) assert response.version == 774 @@ -2907,19 +3014,19 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -2931,7 +3038,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policy.Policy) @@ -2941,6 +3048,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + def test_get_iam_policy_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -2950,7 +3062,7 @@ def test_get_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.get_iam_policy(request) @@ -2977,9 +3089,7 @@ async def test_get_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.get_iam_policy(request) @@ -2994,10 +3104,10 @@ async def test_get_iam_policy_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_get_iam_policy_from_dict(): +def test_get_iam_policy_from_dict_foreign(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -3022,7 +3132,7 @@ def test_set_iam_policy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -3035,6 +3145,7 @@ def test_set_iam_policy( assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) assert response.version == 774 @@ -3047,19 +3158,19 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -3071,7 +3182,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, policy.Policy) @@ -3081,6 +3192,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + def test_set_iam_policy_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -3090,7 +3206,7 @@ def test_set_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.set_iam_policy(request) @@ -3117,9 +3233,7 @@ async def test_set_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.set_iam_policy(request) @@ -3134,10 +3248,10 @@ async def test_set_iam_policy_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_set_iam_policy_from_dict(): +def test_set_iam_policy_from_dict_foreign(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -3163,7 +3277,7 @@ def test_test_iam_permissions( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse( @@ -3179,6 +3293,7 @@ def test_test_iam_permissions( assert args[0] == iam_policy.TestIamPermissionsRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -3189,18 +3304,20 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest +): client = PolicyTagManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3213,7 +3330,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.TestIamPermissionsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, iam_policy.TestIamPermissionsResponse) @@ -3221,6 +3338,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert response.permissions == ["permissions_value"] +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + def test_test_iam_permissions_field_headers(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -3231,7 +3353,7 @@ def test_test_iam_permissions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = iam_policy.TestIamPermissionsResponse() @@ -3260,7 +3382,7 @@ async def test_test_iam_permissions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy.TestIamPermissionsResponse() @@ -3278,11 +3400,11 @@ async def test_test_iam_permissions_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_test_iam_permissions_from_dict(): +def test_test_iam_permissions_from_dict_foreign(): client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = iam_policy.TestIamPermissionsResponse() @@ -3332,7 +3454,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = PolicyTagManagerClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -3350,10 +3472,25 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.PolicyTagManagerGrpcTransport,) + assert isinstance(client.transport, transports.PolicyTagManagerGrpcTransport,) def test_policy_tag_manager_base_transport_error(): @@ -3416,6 +3553,17 @@ def test_policy_tag_manager_base_transport_with_credentials_file(): ) +def test_policy_tag_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport() + adc.assert_called_once() + + def test_policy_tag_manager_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(auth, "default") as adc: @@ -3448,7 +3596,7 @@ def test_policy_tag_manager_host_no_port(): api_endpoint="datacatalog.googleapis.com" ), ) - assert client._transport._host == "datacatalog.googleapis.com:443" + assert client.transport._host == "datacatalog.googleapis.com:443" def test_policy_tag_manager_host_with_port(): @@ -3458,185 +3606,119 @@ def test_policy_tag_manager_host_with_port(): api_endpoint="datacatalog.googleapis.com:8000" ), ) - assert client._transport._host == "datacatalog.googleapis.com:8000" + assert client.transport._host == "datacatalog.googleapis.com:8000" def test_policy_tag_manager_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.PolicyTagManagerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None def test_policy_tag_manager_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. 
transport = transports.PolicyTagManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_policy_tag_manager_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. - mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.PolicyTagManagerGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_policy_tag_manager_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be 
created. - mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.PolicyTagManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], ) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_policy_tag_manager_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint +def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source( + transport_class, ): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.PolicyTagManagerGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( - "api_mtls_endpoint", 
["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], ) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_policy_tag_manager_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. +def test_policy_tag_manager_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - mock_cred = mock.Mock() - transport = transports.PolicyTagManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + 
credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel def test_policy_tag_path(): @@ -3669,9 +3751,9 @@ def test_parse_policy_tag_path(): def test_taxonomy_path(): - project = "squid" - location = "clam" - taxonomy = "whelk" + project = "winkle" + location = "nautilus" + taxonomy = "scallop" expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( project=project, location=location, taxonomy=taxonomy, @@ -3682,9 +3764,9 @@ def test_taxonomy_path(): def test_parse_taxonomy_path(): expected = { - "project": "octopus", - "location": "oyster", - "taxonomy": "nudibranch", + "project": "abalone", + "location": "squid", + "taxonomy": "clam", } path = PolicyTagManagerClient.taxonomy_path(**expected) @@ -3693,6 +3775,107 @@ def test_parse_taxonomy_path(): assert expected == actual +def test_common_billing_account_path(): + billing_account = "whelk" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PolicyTagManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = PolicyTagManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + + expected = "folders/{folder}".format(folder=folder,) + actual = PolicyTagManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = PolicyTagManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + + expected = "organizations/{organization}".format(organization=organization,) + actual = PolicyTagManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = PolicyTagManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + + expected = "projects/{project}".format(project=project,) + actual = PolicyTagManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = PolicyTagManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = PolicyTagManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = PolicyTagManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py index 4b8f9dd6..cb5be9a9 100644 --- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -108,12 +108,12 @@ def test_policy_tag_manager_serialization_client_from_service_account_file( ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "datacatalog.googleapis.com:443" + assert client.transport._host == "datacatalog.googleapis.com:443" def test_policy_tag_manager_serialization_client_get_transport_class(): @@ -177,15 +177,14 @@ def test_policy_tag_manager_serialization_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -194,15 +193,14 @@ def test_policy_tag_manager_serialization_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -211,95 +209,185 @@ def test_policy_tag_manager_serialization_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + "true", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PolicyTagManagerSerializationClient, + transports.PolicyTagManagerSerializationGrpcTransport, + "grpc", + "false", + ), + ( + PolicyTagManagerSerializationAsyncClient, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + PolicyTagManagerSerializationClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationClient), +) +@mock.patch.object( + PolicyTagManagerSerializationAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PolicyTagManagerSerializationAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_policy_tag_manager_serialization_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds ): patched.return_value = None - client = client_class() + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + 
ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -330,8 +418,7 @@ def test_policy_tag_manager_serialization_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -365,8 +452,7 @@ def test_policy_tag_manager_serialization_client_client_options_credentials_file credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, 
client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -385,8 +471,7 @@ def test_policy_tag_manager_serialization_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -406,7 +491,7 @@ def test_import_taxonomies( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.import_taxonomies), "__call__" + type(client.transport.import_taxonomies), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() @@ -420,6 +505,7 @@ def test_import_taxonomies( assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) @@ -428,18 +514,21 @@ def test_import_taxonomies_from_dict(): @pytest.mark.asyncio -async def test_import_taxonomies_async(transport: str = "grpc_asyncio"): +async def test_import_taxonomies_async( + transport: str = "grpc_asyncio", + request_type=policytagmanagerserialization.ImportTaxonomiesRequest, +): client = PolicyTagManagerSerializationAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanagerserialization.ImportTaxonomiesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.import_taxonomies), "__call__" + type(client.transport.import_taxonomies), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -452,12 +541,17 @@ async def test_import_taxonomies_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() # Establish that the response is the type that we expect. assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) +@pytest.mark.asyncio +async def test_import_taxonomies_async_from_dict(): + await test_import_taxonomies_async(request_type=dict) + + def test_import_taxonomies_field_headers(): client = PolicyTagManagerSerializationClient( credentials=credentials.AnonymousCredentials(), @@ -470,7 +564,7 @@ def test_import_taxonomies_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.import_taxonomies), "__call__" + type(client.transport.import_taxonomies), "__call__" ) as call: call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() @@ -499,7 +593,7 @@ async def test_import_taxonomies_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.import_taxonomies), "__call__" + type(client.transport.import_taxonomies), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanagerserialization.ImportTaxonomiesResponse() @@ -531,7 +625,7 @@ def test_export_taxonomies( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.export_taxonomies), "__call__" + type(client.transport.export_taxonomies), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() @@ -545,6 +639,7 @@ def test_export_taxonomies( assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) @@ -553,18 +648,21 @@ def test_export_taxonomies_from_dict(): @pytest.mark.asyncio -async def test_export_taxonomies_async(transport: str = "grpc_asyncio"): +async def test_export_taxonomies_async( + transport: str = "grpc_asyncio", + request_type=policytagmanagerserialization.ExportTaxonomiesRequest, +): client = PolicyTagManagerSerializationAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = policytagmanagerserialization.ExportTaxonomiesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.export_taxonomies), "__call__" + type(client.transport.export_taxonomies), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -577,12 +675,17 @@ async def test_export_taxonomies_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) +@pytest.mark.asyncio +async def test_export_taxonomies_async_from_dict(): + await test_export_taxonomies_async(request_type=dict) + + def test_export_taxonomies_field_headers(): client = PolicyTagManagerSerializationClient( credentials=credentials.AnonymousCredentials(), @@ -595,7 +698,7 @@ def test_export_taxonomies_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.export_taxonomies), "__call__" + type(client.transport.export_taxonomies), "__call__" ) as call: call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() @@ -624,7 +727,7 @@ async def test_export_taxonomies_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.export_taxonomies), "__call__" + type(client.transport.export_taxonomies), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policytagmanagerserialization.ExportTaxonomiesResponse() @@ -678,7 +781,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = PolicyTagManagerSerializationClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -696,13 +799,28 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = PolicyTagManagerSerializationClient( credentials=credentials.AnonymousCredentials(), ) assert isinstance( - client._transport, transports.PolicyTagManagerSerializationGrpcTransport, + client.transport, transports.PolicyTagManagerSerializationGrpcTransport, ) @@ -755,6 +873,17 @@ def test_policy_tag_manager_serialization_base_transport_with_credentials_file() ) +def test_policy_tag_manager_serialization_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport() + adc.assert_called_once() + + def test_policy_tag_manager_serialization_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(auth, "default") as adc: @@ -787,7 +916,7 @@ def test_policy_tag_manager_serialization_host_no_port(): api_endpoint="datacatalog.googleapis.com" ), ) - assert client._transport._host == "datacatalog.googleapis.com:443" + assert client.transport._host == "datacatalog.googleapis.com:443" def test_policy_tag_manager_serialization_host_with_port(): @@ -797,185 +926,251 @@ def test_policy_tag_manager_serialization_host_with_port(): api_endpoint="datacatalog.googleapis.com:8000" ), ) - assert client._transport._host == "datacatalog.googleapis.com:8000" + assert client.transport._host == "datacatalog.googleapis.com:8000" def test_policy_tag_manager_serialization_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.PolicyTagManagerSerializationGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. 
transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_policy_tag_manager_serialization_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cert_source( + transport_class, ): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_adc( + transport_class, +): mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = 
mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - transport = transports.PolicyTagManagerSerializationGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" +def test_taxonomy_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format( + project=project, location=location, taxonomy=taxonomy, ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, + actual = PolicyTagManagerSerializationClient.taxonomy_path( + project, location, taxonomy ) - assert transport.grpc_channel == mock_grpc_channel + assert expected == actual -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS 
channel will be created. - mock_cred = mock.Mock() +def test_parse_taxonomy_path(): + expected = { + "project": "octopus", + "location": "oyster", + "taxonomy": "nudibranch", + } + path = PolicyTagManagerSerializationClient.taxonomy_path(**expected) - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_taxonomy_path(path) + assert expected == actual - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" +def test_common_billing_account_path(): + billing_account = "cuttlefish" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, + actual = PolicyTagManagerSerializationClient.common_billing_account_path( + billing_account ) - assert transport.grpc_channel == mock_grpc_channel + assert expected == actual -@pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] -) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_policy_tag_manager_serialization_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. 
- mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = PolicyTagManagerSerializationClient.common_billing_account_path(**expected) - # Mock google.auth.transport.grpc.SslCredentials class. - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.PolicyTagManagerSerializationGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_billing_account_path(path) + assert expected == actual -@pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] -) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel +def test_common_folder_path(): + folder = "winkle" - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + expected = "folders/{folder}".format(folder=folder,) + actual = PolicyTagManagerSerializationClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = PolicyTagManagerSerializationClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + + expected = "organizations/{organization}".format(organization=organization,) + actual = PolicyTagManagerSerializationClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = PolicyTagManagerSerializationClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerSerializationClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + + expected = "projects/{project}".format(project=project,) + actual = PolicyTagManagerSerializationClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = PolicyTagManagerSerializationClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = PolicyTagManagerSerializationClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = PolicyTagManagerSerializationClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_location_path(path) + assert expected == actual def test_client_withDEFAULT_CLIENT_INFO():