From 90ef156dbf96ba9550a55095982ce0d327120d50 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 May 2021 12:56:07 +0000 Subject: [PATCH] chore: upgrade gapic-generator-python to 0.46.3 (#299) PiperOrigin-RevId: 373649163 Source-Link: https://github.com/googleapis/googleapis/commit/7e1b14e6c7a9ab96d2db7e4a131981f162446d34 Source-Link: https://github.com/googleapis/googleapis-gen/commit/0a3c7d272d697796db75857bac73905c68e498c3 --- google/cloud/logging_v2/gapic_metadata.json | 391 ++++++ google/cloud/logging_v2/services/__init__.py | 1 - .../services/config_service_v2/__init__.py | 2 - .../config_service_v2/async_client.py | 120 +- .../services/config_service_v2/client.py | 143 +- .../services/config_service_v2/pagers.py | 10 +- .../config_service_v2/transports/__init__.py | 2 - .../config_service_v2/transports/base.py | 258 ++-- .../config_service_v2/transports/grpc.py | 48 +- .../transports/grpc_asyncio.py | 45 +- .../services/logging_service_v2/__init__.py | 2 - .../logging_service_v2/async_client.py | 76 +- .../services/logging_service_v2/client.py | 79 +- .../services/logging_service_v2/pagers.py | 14 +- .../logging_service_v2/transports/__init__.py | 2 - .../logging_service_v2/transports/base.py | 174 ++- .../logging_service_v2/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 29 +- .../services/metrics_service_v2/__init__.py | 2 - .../metrics_service_v2/async_client.py | 65 +- .../services/metrics_service_v2/client.py | 83 +- .../services/metrics_service_v2/pagers.py | 4 +- .../metrics_service_v2/transports/__init__.py | 2 - .../metrics_service_v2/transports/base.py | 152 +- .../metrics_service_v2/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 29 +- google/cloud/logging_v2/types/__init__.py | 2 - google/cloud/logging_v2/types/log_entry.py | 78 +- google/cloud/logging_v2/types/logging.py | 99 +- .../cloud/logging_v2/types/logging_config.py | 257 ++-- .../cloud/logging_v2/types/logging_metrics.py | 65 +- tests/__init__.py | 15 + tests/unit/__init__.py | 4 +- tests/unit/gapic/__init__.py | 15 + tests/unit/gapic/logging_v2/__init__.py | 1 - .../logging_v2/test_config_service_v2.py | 1223 +++++++++-------- .../logging_v2/test_logging_service_v2.py | 576 +++++--- .../logging_v2/test_metrics_service_v2.py | 530 ++++--- 38 files changed, 2604 insertions(+), 2050 deletions(-) create mode 100644 google/cloud/logging_v2/gapic_metadata.json create mode 100644 tests/unit/gapic/__init__.py diff --git a/google/cloud/logging_v2/gapic_metadata.json b/google/cloud/logging_v2/gapic_metadata.json new file mode 100644 index 00000000..da4eefd4 --- /dev/null +++ b/google/cloud/logging_v2/gapic_metadata.json @@ -0,0 +1,391 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.logging_v2", + "protoPackage": "google.logging.v2", + "schema": "1.0", + "services": { + "ConfigServiceV2": { + "clients": { + "grpc": { + "libraryClient": "ConfigServiceV2Client", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + "methods": [ + "delete_sink" + ] + }, + "DeleteView": { + 
"methods": [ + "delete_view" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConfigServiceV2AsyncClient", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + "methods": [ + "delete_sink" + ] + }, + "DeleteView": { + "methods": [ + "delete_view" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } + } + } + }, + "LoggingServiceV2": { + "clients": { + "grpc": { + "libraryClient": "LoggingServiceV2Client", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LoggingServiceV2AsyncClient", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + } + } + }, + "MetricsServiceV2": { + "clients": { + 
"grpc": { + "libraryClient": "MetricsServiceV2Client", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetricsServiceV2AsyncClient", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/logging_v2/services/__init__.py b/google/cloud/logging_v2/services/__init__.py index 42ffdf2b..4de65971 100644 --- a/google/cloud/logging_v2/services/__init__.py +++ b/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/logging_v2/services/config_service_v2/__init__.py b/google/cloud/logging_v2/services/config_service_v2/__init__.py index 4ab8f4d4..e7f60428 100644 --- a/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import ConfigServiceV2Client from .async_client import ConfigServiceV2AsyncClient diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index ef184d61..634c106b 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,17 +20,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport from .client import ConfigServiceV2Client @@ -60,31 +57,26 @@ class ConfigServiceV2AsyncClient: parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) - common_billing_account_path = staticmethod( ConfigServiceV2Client.common_billing_account_path ) parse_common_billing_account_path = staticmethod( ConfigServiceV2Client.parse_common_billing_account_path ) - common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path) parse_common_folder_path = staticmethod( ConfigServiceV2Client.parse_common_folder_path ) - common_organization_path = staticmethod( ConfigServiceV2Client.common_organization_path ) parse_common_organization_path = staticmethod( ConfigServiceV2Client.parse_common_organization_path ) - common_project_path = staticmethod(ConfigServiceV2Client.common_project_path) parse_common_project_path = staticmethod( ConfigServiceV2Client.parse_common_project_path ) - common_location_path = staticmethod(ConfigServiceV2Client.common_location_path) parse_common_location_path = staticmethod( ConfigServiceV2Client.parse_common_location_path @@ -92,7 +84,8 @@ class ConfigServiceV2AsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -107,7 +100,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -124,7 +117,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> ConfigServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: ConfigServiceV2Transport: The transport used by the client instance. 
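The from_service_account_info / from_service_account_file constructors and the transport property whose docstrings are touched above can be exercised directly. A hedged usage sketch; the key file path is a placeholder, not something taken from this patch:

from google.cloud.logging_v2.services.config_service_v2 import (
    ConfigServiceV2AsyncClient,
)

# Placeholder path to a service account JSON key file.
client = ConfigServiceV2AsyncClient.from_service_account_file("service-account.json")

# The transport property exposes the underlying transport instance.
print(type(client.transport).__name__)  # e.g. ConfigServiceV2GrpcAsyncIOTransport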
@@ -138,12 +131,12 @@ def transport(self) -> ConfigServiceV2Transport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the config service v2 client. + """Instantiates the config service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -175,7 +168,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = ConfigServiceV2Client( credentials=credentials, transport=transport, @@ -215,7 +207,6 @@ async def list_buckets( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -244,7 +235,6 @@ async def list_buckets( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -287,7 +277,6 @@ async def get_bucket( Args: request (:class:`google.cloud.logging_v2.types.GetBucketRequest`): The request object. The parameters to `GetBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -299,7 +288,6 @@ async def get_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - request = logging_config.GetBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -337,7 +325,6 @@ async def create_bucket( Args: request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`): The request object. The parameters to `CreateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -349,7 +336,6 @@ async def create_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -395,7 +381,6 @@ async def update_bucket( Args: request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`): The request object. The parameters to `UpdateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -407,7 +392,6 @@ async def update_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -445,7 +429,6 @@ async def delete_bucket( Args: request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`): The request object. The parameters to `DeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -453,7 +436,6 @@ async def delete_bucket( sent along with the request as metadata. """ # Create or coerce a protobuf request object. 
- request = logging_config.DeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -489,7 +471,6 @@ async def undelete_bucket( Args: request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`): The request object. The parameters to `UndeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -497,7 +478,6 @@ async def undelete_bucket( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.UndeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -543,7 +523,6 @@ async def list_views( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -572,7 +551,6 @@ async def list_views( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -615,7 +593,6 @@ async def get_view( Args: request (:class:`google.cloud.logging_v2.types.GetViewRequest`): The request object. The parameters to `GetView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -629,7 +606,6 @@ async def get_view( """ # Create or coerce a protobuf request object. - request = logging_config.GetViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -666,7 +642,6 @@ async def create_view( Args: request (:class:`google.cloud.logging_v2.types.CreateViewRequest`): The request object. The parameters to `CreateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -680,7 +655,6 @@ async def create_view( """ # Create or coerce a protobuf request object. - request = logging_config.CreateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -717,7 +691,6 @@ async def update_view( Args: request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`): The request object. The parameters to `UpdateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -731,7 +704,6 @@ async def update_view( """ # Create or coerce a protobuf request object. - request = logging_config.UpdateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -767,7 +739,6 @@ async def delete_view( Args: request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`): The request object. The parameters to `DeleteView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -775,7 +746,6 @@ async def delete_view( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -825,7 +795,6 @@ async def list_sinks( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -854,7 +823,6 @@ async def list_sinks( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -867,9 +835,9 @@ async def list_sinks( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -924,7 +892,6 @@ async def get_sink( This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -958,7 +925,6 @@ async def get_sink( # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name @@ -971,9 +937,9 @@ async def get_sink( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1037,7 +1003,6 @@ async def create_sink( This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1071,7 +1036,6 @@ async def create_sink( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if sink is not None: @@ -1103,7 +1067,7 @@ async def update_sink( *, sink_name: str = None, sink: logging_config.LogSink = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1161,7 +1125,6 @@ async def update_sink( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1195,7 +1158,6 @@ async def update_sink( # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name if sink is not None: @@ -1212,9 +1174,9 @@ async def update_sink( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1267,7 +1229,6 @@ async def delete_sink( This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1288,7 +1249,6 @@ async def delete_sink( # If we have keyword arguments corresponding to fields on the # request, apply these. 
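The retry hunks in this file only swap the exceptions alias for core_exceptions; the retry policy itself is unchanged. Restated as a standalone object for reference (the initial backoff is an assumption, since only the later parameters are visible in these hunks):

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

default_retry = retries.Retry(
    initial=0.1,  # assumed; not shown in the hunks above
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.InternalServerError,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,
)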
- if sink_name is not None: request.sink_name = sink_name @@ -1301,9 +1261,9 @@ async def delete_sink( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1352,7 +1312,6 @@ async def list_exclusions( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1381,7 +1340,6 @@ async def list_exclusions( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1394,9 +1352,9 @@ async def list_exclusions( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1452,7 +1410,6 @@ async def get_exclusion( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1488,7 +1445,6 @@ async def get_exclusion( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1501,9 +1457,9 @@ async def get_exclusion( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1566,7 +1522,6 @@ async def create_exclusion( This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1602,7 +1557,6 @@ async def create_exclusion( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if exclusion is not None: @@ -1634,7 +1588,7 @@ async def update_exclusion( *, name: str = None, exclusion: logging_config.LogExclusion = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1683,7 +1637,6 @@ async def update_exclusion( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1719,7 +1672,6 @@ async def update_exclusion( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if exclusion is not None: @@ -1778,7 +1730,6 @@ async def delete_exclusion( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1799,7 +1750,6 @@ async def delete_exclusion( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1812,9 +1762,9 @@ async def delete_exclusion( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1858,7 +1808,6 @@ async def get_cmek_settings( See [Enabling CMEK for Logs Router](https://cloud.google.com/logging/docs/routing/managed- encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1882,7 +1831,6 @@ async def get_cmek_settings( """ # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1936,7 +1884,6 @@ async def update_cmek_settings( See [Enabling CMEK for Logs Router](https://cloud.google.com/logging/docs/routing/managed- encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1960,7 +1907,6 @@ async def update_cmek_settings( """ # Create or coerce a protobuf request object. - request = logging_config.UpdateCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index 37a28d7a..d2b32322 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,9 +32,8 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import ConfigServiceV2GrpcTransport from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport @@ -57,7 +54,7 @@ class ConfigServiceV2ClientMeta(type): _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[ConfigServiceV2Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -80,7 +77,8 @@ class ConfigServiceV2Client(metaclass=ConfigServiceV2ClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -114,7 +112,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -131,7 +130,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -150,34 +149,35 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> ConfigServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - ConfigServiceV2Transport: The transport used by the client instance. + ConfigServiceV2Transport: The transport used by the client + instance. 
""" return self._transport @staticmethod def cmek_settings_path(project: str,) -> str: - """Return a fully-qualified cmek_settings string.""" + """Returns a fully-qualified cmek_settings string.""" return "projects/{project}/cmekSettings".format(project=project,) @staticmethod def parse_cmek_settings_path(path: str) -> Dict[str, str]: - """Parse a cmek_settings path into its component segments.""" + """Parses a cmek_settings path into its component segments.""" m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) return m.groupdict() if m else {} @staticmethod def log_bucket_path(project: str, location: str, bucket: str,) -> str: - """Return a fully-qualified log_bucket string.""" + """Returns a fully-qualified log_bucket string.""" return "projects/{project}/locations/{location}/buckets/{bucket}".format( project=project, location=location, bucket=bucket, ) @staticmethod def parse_log_bucket_path(path: str) -> Dict[str, str]: - """Parse a log_bucket path into its component segments.""" + """Parses a log_bucket path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path, @@ -186,38 +186,38 @@ def parse_log_bucket_path(path: str) -> Dict[str, str]: @staticmethod def log_exclusion_path(project: str, exclusion: str,) -> str: - """Return a fully-qualified log_exclusion string.""" + """Returns a fully-qualified log_exclusion string.""" return "projects/{project}/exclusions/{exclusion}".format( project=project, exclusion=exclusion, ) @staticmethod def parse_log_exclusion_path(path: str) -> Dict[str, str]: - """Parse a log_exclusion path into its component segments.""" + """Parses a log_exclusion path into its component segments.""" m = re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def log_sink_path(project: str, sink: str,) -> str: - """Return a fully-qualified log_sink string.""" + """Returns a fully-qualified log_sink string.""" return "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) @staticmethod def parse_log_sink_path(path: str) -> Dict[str, str]: - """Parse a log_sink path into its component segments.""" + """Parses a log_sink path into its component segments.""" m = re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def log_view_path(project: str, location: str, bucket: str, view: str,) -> str: - """Return a fully-qualified log_view string.""" + """Returns a fully-qualified log_view string.""" return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( project=project, location=location, bucket=bucket, view=view, ) @staticmethod def parse_log_view_path(path: str) -> Dict[str, str]: - """Parse a log_view path into its component segments.""" + """Parses a log_view path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path, @@ -226,7 +226,7 @@ def parse_log_view_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -239,7 +239,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return 
"folders/{folder}".format(folder=folder,) @staticmethod @@ -250,7 +250,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -261,7 +261,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -272,7 +272,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -286,12 +286,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ConfigServiceV2Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the config service v2 client. + """Instantiates the config service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -346,9 +346,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -360,12 +361,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -380,8 +383,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -428,7 +431,6 @@ def list_buckets( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -459,10 +461,8 @@ def list_buckets( # there are no flattened fields. if not isinstance(request, logging_config.ListBucketsRequest): request = logging_config.ListBucketsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -501,7 +501,6 @@ def get_bucket( Args: request (google.cloud.logging_v2.types.GetBucketRequest): The request object. The parameters to `GetBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -513,7 +512,6 @@ def get_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.GetBucketRequest. # There's no risk of modifying the input as we've already verified @@ -552,7 +550,6 @@ def create_bucket( Args: request (google.cloud.logging_v2.types.CreateBucketRequest): The request object. The parameters to `CreateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -564,7 +561,6 @@ def create_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.CreateBucketRequest. # There's no risk of modifying the input as we've already verified @@ -611,7 +607,6 @@ def update_bucket( Args: request (google.cloud.logging_v2.types.UpdateBucketRequest): The request object. The parameters to `UpdateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -623,7 +618,6 @@ def update_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.UpdateBucketRequest. # There's no risk of modifying the input as we've already verified @@ -662,7 +656,6 @@ def delete_bucket( Args: request (google.cloud.logging_v2.types.DeleteBucketRequest): The request object. The parameters to `DeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -670,7 +663,6 @@ def delete_bucket( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.DeleteBucketRequest. # There's no risk of modifying the input as we've already verified @@ -707,7 +699,6 @@ def undelete_bucket( Args: request (google.cloud.logging_v2.types.UndeleteBucketRequest): The request object. The parameters to `UndeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -715,7 +706,6 @@ def undelete_bucket( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.UndeleteBucketRequest. # There's no risk of modifying the input as we've already verified @@ -762,7 +752,6 @@ def list_views( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -793,10 +782,8 @@ def list_views( # there are no flattened fields. 
if not isinstance(request, logging_config.ListViewsRequest): request = logging_config.ListViewsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -835,7 +822,6 @@ def get_view( Args: request (google.cloud.logging_v2.types.GetViewRequest): The request object. The parameters to `GetView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -849,7 +835,6 @@ def get_view( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.GetViewRequest. # There's no risk of modifying the input as we've already verified @@ -887,7 +872,6 @@ def create_view( Args: request (google.cloud.logging_v2.types.CreateViewRequest): The request object. The parameters to `CreateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -901,7 +885,6 @@ def create_view( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.CreateViewRequest. # There's no risk of modifying the input as we've already verified @@ -939,7 +922,6 @@ def update_view( Args: request (google.cloud.logging_v2.types.UpdateViewRequest): The request object. The parameters to `UpdateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -953,7 +935,6 @@ def update_view( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.UpdateViewRequest. # There's no risk of modifying the input as we've already verified @@ -990,7 +971,6 @@ def delete_view( Args: request (google.cloud.logging_v2.types.DeleteViewRequest): The request object. The parameters to `DeleteView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -998,7 +978,6 @@ def delete_view( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.DeleteViewRequest. # There's no risk of modifying the input as we've already verified @@ -1049,7 +1028,6 @@ def list_sinks( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1080,10 +1058,8 @@ def list_sinks( # there are no flattened fields. if not isinstance(request, logging_config.ListSinksRequest): request = logging_config.ListSinksRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1138,7 +1114,6 @@ def get_sink( This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1174,10 +1149,8 @@ def get_sink( # there are no flattened fields. 
if not isinstance(request, logging_config.GetSinkRequest): request = logging_config.GetSinkRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name @@ -1241,7 +1214,6 @@ def create_sink( This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1277,10 +1249,8 @@ def create_sink( # there are no flattened fields. if not isinstance(request, logging_config.CreateSinkRequest): request = logging_config.CreateSinkRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if sink is not None: @@ -1308,7 +1278,7 @@ def update_sink( *, sink_name: str = None, sink: logging_config.LogSink = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1366,7 +1336,6 @@ def update_sink( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1402,10 +1371,8 @@ def update_sink( # there are no flattened fields. if not isinstance(request, logging_config.UpdateSinkRequest): request = logging_config.UpdateSinkRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name if sink is not None: @@ -1462,7 +1429,6 @@ def delete_sink( This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1485,10 +1451,8 @@ def delete_sink( # there are no flattened fields. if not isinstance(request, logging_config.DeleteSinkRequest): request = logging_config.DeleteSinkRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name @@ -1537,7 +1501,6 @@ def list_exclusions( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1568,10 +1531,8 @@ def list_exclusions( # there are no flattened fields. if not isinstance(request, logging_config.ListExclusionsRequest): request = logging_config.ListExclusionsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1627,7 +1588,6 @@ def get_exclusion( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1665,10 +1625,8 @@ def get_exclusion( # there are no flattened fields. 
if not isinstance(request, logging_config.GetExclusionRequest): request = logging_config.GetExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1731,7 +1689,6 @@ def create_exclusion( This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1769,10 +1726,8 @@ def create_exclusion( # there are no flattened fields. if not isinstance(request, logging_config.CreateExclusionRequest): request = logging_config.CreateExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if exclusion is not None: @@ -1800,7 +1755,7 @@ def update_exclusion( *, name: str = None, exclusion: logging_config.LogExclusion = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1849,7 +1804,6 @@ def update_exclusion( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1887,10 +1841,8 @@ def update_exclusion( # there are no flattened fields. if not isinstance(request, logging_config.UpdateExclusionRequest): request = logging_config.UpdateExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if exclusion is not None: @@ -1945,7 +1897,6 @@ def delete_exclusion( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1968,10 +1919,8 @@ def delete_exclusion( # there are no flattened fields. if not isinstance(request, logging_config.DeleteExclusionRequest): request = logging_config.DeleteExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2015,7 +1964,6 @@ def get_cmek_settings( See [Enabling CMEK for Logs Router](https://cloud.google.com/logging/docs/routing/managed- encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2039,7 +1987,6 @@ def get_cmek_settings( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.GetCmekSettingsRequest. # There's no risk of modifying the input as we've already verified @@ -2094,7 +2041,6 @@ def update_cmek_settings( See [Enabling CMEK for Logs Router](https://cloud.google.com/logging/docs/routing/managed- encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2118,7 +2064,6 @@ def update_cmek_settings( """ # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes # in a logging_config.UpdateCmekSettingsRequest. # There's no risk of modifying the input as we've already verified diff --git a/google/cloud/logging_v2/services/config_service_v2/pagers.py b/google/cloud/logging_v2/services/config_service_v2/pagers.py index f656fef0..6d8e11fb 100644 --- a/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -373,7 +371,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -501,7 +499,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 30282e2d..b1e24fc6 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 3981d8e9..d52c9763 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
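The pager docstring tweaks above are cosmetic; for context, a hedged sketch of how these pagers are normally consumed (the parent value is a placeholder and the call assumes default credentials are available):

from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client()

# list_sinks returns a ListSinksPager that transparently fetches further pages.
for sink in client.list_sinks(parent="projects/my-project"):
    print(sink.name)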
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" @@ -47,21 +57,24 @@ class ConfigServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.read", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -70,7 +83,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -84,29 +97,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
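The module-level version probes added above drive the scope handling: on google-auth 1.25.0 and newer the transport can pass default_scopes separately, while older releases only accept a single scopes argument. A self-contained sketch of that gate, taking the detected version string as a parameter:

import packaging.version


def supports_default_scopes(google_auth_version):
    # The probe above may yield None when neither google.auth.__version__ nor
    # pkg_resources can report a version.
    if not google_auth_version:
        return False
    return packaging.version.parse(google_auth_version) >= packaging.version.parse(
        "1.25.0"
    )


print(supports_default_scopes("1.30.0"))  # True
print(supports_default_scopes("1.20.0"))  # False
print(supports_default_scopes(None))  # False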
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -150,9 +210,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -166,9 +226,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -185,9 +245,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -201,9 +261,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -217,9 +277,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -233,9 +293,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -255,9 +315,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -277,11 +337,11 @@ def _prep_wrapped_messages(self, client_info): @property def list_buckets( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListBucketsRequest], - typing.Union[ + Union[ logging_config.ListBucketsResponse, - typing.Awaitable[logging_config.ListBucketsResponse], + Awaitable[logging_config.ListBucketsResponse], ], ]: raise NotImplementedError() @@ -289,62 +349,56 @@ def list_buckets( @property def get_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def create_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, 
Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def update_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def delete_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteBucketRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def undelete_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UndeleteBucketRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_views( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListViewsRequest], - typing.Union[ + Union[ logging_config.ListViewsResponse, - typing.Awaitable[logging_config.ListViewsResponse], + Awaitable[logging_config.ListViewsResponse], ], ]: raise NotImplementedError() @@ -352,47 +406,47 @@ def list_views( @property def get_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def create_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def update_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def delete_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteViewRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_sinks( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListSinksRequest], - typing.Union[ + Union[ logging_config.ListSinksResponse, - typing.Awaitable[logging_config.ListSinksResponse], + Awaitable[logging_config.ListSinksResponse], ], ]: raise NotImplementedError() @@ -400,47 +454,47 @@ def list_sinks( @property def get_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise NotImplementedError() @property def create_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise NotImplementedError() @property def update_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise 
NotImplementedError() @property def delete_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteSinkRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_exclusions( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListExclusionsRequest], - typing.Union[ + Union[ logging_config.ListExclusionsResponse, - typing.Awaitable[logging_config.ListExclusionsResponse], + Awaitable[logging_config.ListExclusionsResponse], ], ]: raise NotImplementedError() @@ -448,64 +502,54 @@ def list_exclusions( @property def get_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def create_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def update_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def delete_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteExclusionRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def get_cmek_settings( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetCmekSettingsRequest], - typing.Union[ - logging_config.CmekSettings, typing.Awaitable[logging_config.CmekSettings] - ], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], ]: raise NotImplementedError() @property def update_cmek_settings( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateCmekSettingsRequest], - typing.Union[ - logging_config.CmekSettings, typing.Awaitable[logging_config.CmekSettings] - ], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], ]: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 67f2ea70..327cc79c 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
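The base transport above probes the installed google-auth version and only passes default_scopes when the library is new enough to accept it. A rough standalone sketch of that gate follows; detect_google_auth_version and pick_scopes_kwargs are made-up names, the scope tuple is trimmed for brevity, and only packaging and pkg_resources are assumed to be importable.

    import pkg_resources
    import packaging.version

    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/logging.read",
    )

    def detect_google_auth_version():
        # google.auth.__version__ only exists from google-auth 1.26.0 on, so
        # fall back to asking pkg_resources about the installed distribution.
        try:
            import google.auth
            return google.auth.__version__
        except (ImportError, AttributeError):
            try:
                return pkg_resources.get_distribution("google-auth").version
            except pkg_resources.DistributionNotFound:
                return None

    def pick_scopes_kwargs(version, scopes):
        # google-auth >= 1.25.0 understands default_scopes; older releases
        # only take a single scopes argument.
        if version and packaging.version.parse(version) >= packaging.version.parse("1.25.0"):
            return {"scopes": scopes, "default_scopes": AUTH_SCOPES}
        return {"scopes": scopes or AUTH_SCOPES}

    print(pick_scopes_kwargs(detect_google_auth_version(), None))

Keeping this gate in the abstract base means every transport shares it until the minimum supported versions are raised, which is what the TODO above says.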
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -343,7 +343,7 @@ def update_bucket( @property def delete_bucket( self, - ) -> Callable[[logging_config.DeleteBucketRequest], empty.Empty]: + ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -364,14 +364,14 @@ def delete_bucket( self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_bucket"] @property def undelete_bucket( self, - ) -> Callable[[logging_config.UndeleteBucketRequest], empty.Empty]: + ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a bucket. 
A bucket that has been deleted @@ -391,7 +391,7 @@ def undelete_bucket( self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["undelete_bucket"] @@ -502,7 +502,9 @@ def update_view( return self._stubs["update_view"] @property - def delete_view(self) -> Callable[[logging_config.DeleteViewRequest], empty.Empty]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. Deletes a view from a bucket. @@ -521,7 +523,7 @@ def delete_view(self) -> Callable[[logging_config.DeleteViewRequest], empty.Empt self._stubs["delete_view"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_view"] @@ -639,7 +641,9 @@ def update_sink( return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[[logging_config.DeleteSinkRequest], empty.Empty]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], empty_pb2.Empty]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -659,7 +663,7 @@ def delete_sink(self) -> Callable[[logging_config.DeleteSinkRequest], empty.Empt self._stubs["delete_sink"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_sink"] @@ -776,7 +780,7 @@ def update_exclusion( @property def delete_exclusion( self, - ) -> Callable[[logging_config.DeleteExclusionRequest], empty.Empty]: + ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion. @@ -795,7 +799,7 @@ def delete_exclusion( self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_exclusion"] diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 3eabb2bc..3d7d271b 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
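The gRPC transport above registers each RPC as a unary_unary stub on the channel and hands it back out of self._stubs, so repeated property accesses appear to reuse the same stub object. A reduced illustration of that caching pattern, with FakeChannel and Transport as made-up stand-ins rather than grpc or library classes:

    # Illustration only; FakeChannel is not grpc's API.
    class FakeChannel:
        def unary_unary(self, method, request_serializer=None, response_deserializer=None):
            return object()  # stands in for a real gRPC stub

    class Transport:
        def __init__(self, channel):
            self.grpc_channel = channel
            self._stubs = {}

        @property
        def delete_bucket(self):
            # Create the stub once, then hand back the cached object on
            # every later property access.
            if "delete_bucket" not in self._stubs:
                self._stubs["delete_bucket"] = self.grpc_channel.unary_unary(
                    "/google.logging.v2.ConfigServiceV2/DeleteBucket",
                )
            return self._stubs["delete_bucket"]

    t = Transport(FakeChannel())
    assert t.delete_bucket is t.delete_bucket  # same cached stub both times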
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport @@ -54,7 +51,7 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -354,7 +353,7 @@ def update_bucket( @property def delete_bucket( self, - ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -375,14 +374,14 @@ def delete_bucket( self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_bucket"] @property def undelete_bucket( self, - ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a bucket. 
A bucket that has been deleted @@ -402,7 +401,7 @@ def undelete_bucket( self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["undelete_bucket"] @@ -521,7 +520,7 @@ def update_view( @property def delete_view( self, - ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. Deletes a view from a bucket. @@ -540,7 +539,7 @@ def delete_view( self._stubs["delete_view"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_view"] @@ -666,7 +665,7 @@ def update_sink( @property def delete_sink( self, - ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -686,7 +685,7 @@ def delete_sink( self._stubs["delete_sink"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_sink"] @@ -810,7 +809,7 @@ def update_exclusion( @property def delete_exclusion( self, - ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion. @@ -829,7 +828,7 @@ def delete_exclusion( self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_exclusion"] diff --git a/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/__init__.py index c46b48a2..bd7a7982 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
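Both create_channel classmethods above stopped passing scopes directly and instead forward whatever _get_self_signed_jwt_kwargs builds, so a new enough google-api-core can use self-signed JWTs with a default_host. A rough sketch of that branch, assuming only packaging is installed; self_signed_jwt_kwargs is a made-up name, the scope tuple is trimmed, and the version strings are just examples.

    import packaging.version

    DEFAULT_HOST = "logging.googleapis.com"
    AUTH_SCOPES = ("https://www.googleapis.com/auth/logging.read",)

    def self_signed_jwt_kwargs(api_core_version, scopes):
        # google-api-core >= 1.26.0 accepts default_scopes/default_host and
        # can use self-signed JWTs; older releases only understand scopes.
        new_enough = api_core_version and packaging.version.parse(
            api_core_version
        ) >= packaging.version.parse("1.26.0")
        if new_enough:
            return {
                "default_scopes": AUTH_SCOPES,
                "scopes": scopes,
                "default_host": DEFAULT_HOST,
            }
        return {"scopes": scopes or AUTH_SCOPES}

    print(self_signed_jwt_kwargs("1.26.3", None))  # newer api-core
    print(self_signed_jwt_kwargs("1.25.1", None))  # older api-core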
# - from .client import LoggingServiceV2Client from .async_client import LoggingServiceV2AsyncClient diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index f6155692..6a11e96c 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -31,17 +29,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging - from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client @@ -57,31 +54,26 @@ class LoggingServiceV2AsyncClient: log_path = staticmethod(LoggingServiceV2Client.log_path) parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) - common_billing_account_path = staticmethod( LoggingServiceV2Client.common_billing_account_path ) parse_common_billing_account_path = staticmethod( LoggingServiceV2Client.parse_common_billing_account_path ) - common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) parse_common_folder_path = staticmethod( LoggingServiceV2Client.parse_common_folder_path ) - common_organization_path = staticmethod( LoggingServiceV2Client.common_organization_path ) parse_common_organization_path = staticmethod( LoggingServiceV2Client.parse_common_organization_path ) - common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) parse_common_project_path = staticmethod( LoggingServiceV2Client.parse_common_project_path ) - common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) parse_common_location_path = staticmethod( LoggingServiceV2Client.parse_common_location_path @@ -89,7 +81,8 @@ class LoggingServiceV2AsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -104,7 +97,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -121,7 +114,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> LoggingServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: LoggingServiceV2Transport: The transport used by the client instance. @@ -135,12 +128,12 @@ def transport(self) -> LoggingServiceV2Transport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the logging service v2 client. + """Instantiates the logging service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -172,7 +165,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = LoggingServiceV2Client( credentials=credentials, transport=transport, @@ -217,7 +209,6 @@ async def delete_log( This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -238,7 +229,6 @@ async def delete_log( # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name @@ -251,9 +241,9 @@ async def delete_log( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -277,7 +267,7 @@ async def write_log_entries( request: logging.WriteLogEntriesRequest = None, *, log_name: str = None, - resource: monitored_resource.MonitoredResource = None, + resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -383,7 +373,6 @@ async def write_log_entries( This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -408,7 +397,6 @@ async def write_log_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if log_name is not None: request.log_name = log_name if resource is not None: @@ -416,7 +404,6 @@ async def write_log_entries( if labels: request.labels.update(labels) - if entries: request.entries.extend(entries) @@ -429,9 +416,9 @@ async def write_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -514,7 +501,6 @@ async def list_log_entries( This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -543,12 +529,10 @@ async def list_log_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. - if filter is not None: request.filter = filter if order_by is not None: request.order_by = order_by - if resource_names: request.resource_names.extend(resource_names) @@ -561,9 +545,9 @@ async def list_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -598,7 +582,6 @@ async def list_monitored_resource_descriptors( request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -615,7 +598,6 @@ async def list_monitored_resource_descriptors( """ # Create or coerce a protobuf request object. - request = logging.ListMonitoredResourceDescriptorsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -627,9 +609,9 @@ async def list_monitored_resource_descriptors( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -678,7 +660,6 @@ async def list_logs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -707,7 +688,6 @@ async def list_logs( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -720,9 +700,9 @@ async def list_logs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -783,9 +763,9 @@ def tail_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=3600.0, ), diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index 00d758ab..dd94b672 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -33,20 +31,19 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging - from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -67,7 +64,7 @@ class LoggingServiceV2ClientMeta(type): _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[LoggingServiceV2Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -90,7 +87,8 @@ class LoggingServiceV2Client(metaclass=LoggingServiceV2ClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
Args: @@ -124,7 +122,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -141,7 +140,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -160,27 +159,28 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> LoggingServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - LoggingServiceV2Transport: The transport used by the client instance. + LoggingServiceV2Transport: The transport used by the client + instance. """ return self._transport @staticmethod def log_path(project: str, log: str,) -> str: - """Return a fully-qualified log string.""" + """Returns a fully-qualified log string.""" return "projects/{project}/logs/{log}".format(project=project, log=log,) @staticmethod def parse_log_path(path: str) -> Dict[str, str]: - """Parse a log path into its component segments.""" + """Parses a log path into its component segments.""" m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -193,7 +193,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -204,7 +204,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -215,7 +215,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -226,7 +226,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -240,12 +240,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, LoggingServiceV2Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] 
= None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the logging service v2 client. + """Instantiates the logging service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -300,9 +300,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -314,12 +315,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -334,8 +337,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -387,7 +390,6 @@ def delete_log( This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -410,10 +412,8 @@ def delete_log( # there are no flattened fields. if not isinstance(request, logging.DeleteLogRequest): request = logging.DeleteLogRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name @@ -437,7 +437,7 @@ def write_log_entries( request: logging.WriteLogEntriesRequest = None, *, log_name: str = None, - resource: monitored_resource.MonitoredResource = None, + resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -543,7 +543,6 @@ def write_log_entries( This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -570,10 +569,8 @@ def write_log_entries( # there are no flattened fields. if not isinstance(request, logging.WriteLogEntriesRequest): request = logging.WriteLogEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name if resource is not None: @@ -662,7 +659,6 @@ def list_log_entries( This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -693,10 +689,8 @@ def list_log_entries( # there are no flattened fields. if not isinstance(request, logging.ListLogEntriesRequest): request = logging.ListLogEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if resource_names is not None: request.resource_names = resource_names if filter is not None: @@ -735,7 +729,6 @@ def list_monitored_resource_descriptors( request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -752,7 +745,6 @@ def list_monitored_resource_descriptors( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging.ListMonitoredResourceDescriptorsRequest. # There's no risk of modifying the input as we've already verified @@ -807,7 +799,6 @@ def list_logs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -838,10 +829,8 @@ def list_logs( # there are no flattened fields. if not isinstance(request, logging.ListLogsRequest): request = logging.ListLogsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent diff --git a/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 7ab8ac8d..b06007cb 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -26,7 +24,7 @@ Optional, ) -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -213,7 +211,7 @@ def pages(self) -> Iterable[logging.ListMonitoredResourceDescriptorsResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[monitored_resource.MonitoredResourceDescriptor]: + def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: for page in self.pages: yield from page.resource_descriptors @@ -249,7 +247,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and @@ -281,7 +279,7 @@ async def pages( def __aiter__( self, - ) -> AsyncIterable[monitored_resource.MonitoredResourceDescriptor]: + ) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: for response in page.resource_descriptors: @@ -381,7 +379,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index cd979b77..65e71312 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 66003ef9..fdcbead0 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
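The async pagers above implement __aiter__ by wrapping an async generator that walks pages lazily and yields each item. The same shape in plain Python, with FakePage and FakePager as made-up stand-ins for the generated classes:

    import asyncio

    # Stand-ins for a paged API response; not the library's classes.
    class FakePage:
        def __init__(self, items):
            self.resource_descriptors = items

    class FakePager:
        def __init__(self, pages):
            self._pages = pages

        @property
        async def pages(self):
            # The real pager fetches each page from the API lazily; here
            # they are just replayed from a list.
            for page in self._pages:
                yield page

        def __aiter__(self):
            async def async_generator():
                async for page in self.pages:
                    for item in page.resource_descriptors:
                        yield item

            return async_generator()

    async def main():
        pager = FakePager([FakePage(["gce_instance", "k8s_container"]), FakePage(["global"])])
        print([item async for item in pager])

    asyncio.run(main())

Running it prints the descriptors from both pages as one flat sequence.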
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" @@ -48,21 +58,24 @@ class LoggingServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.write", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -71,7 +84,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -85,29 +98,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -118,9 +178,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -134,9 +194,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -150,9 +210,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -166,9 +226,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -182,9 +242,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -198,9 +258,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=3600.0, ), @@ -212,20 +272,18 @@ def _prep_wrapped_messages(self, client_info): @property def delete_log( self, - ) -> typing.Callable[ - [logging.DeleteLogRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [logging.DeleteLogRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def write_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.WriteLogEntriesRequest], - typing.Union[ - logging.WriteLogEntriesResponse, - typing.Awaitable[logging.WriteLogEntriesResponse], + Union[ + logging.WriteLogEntriesResponse, Awaitable[logging.WriteLogEntriesResponse] ], ]: raise NotImplementedError() @@ -233,11 +291,10 @@ def write_log_entries( @property def list_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListLogEntriesRequest], - typing.Union[ - logging.ListLogEntriesResponse, - typing.Awaitable[logging.ListLogEntriesResponse], + Union[ + logging.ListLogEntriesResponse, Awaitable[logging.ListLogEntriesResponse] ], ]: raise NotImplementedError() @@ -245,11 +302,11 @@ def list_log_entries( @property def list_monitored_resource_descriptors( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListMonitoredResourceDescriptorsRequest], - typing.Union[ + Union[ logging.ListMonitoredResourceDescriptorsResponse, - 
typing.Awaitable[logging.ListMonitoredResourceDescriptorsResponse], + Awaitable[logging.ListMonitoredResourceDescriptorsResponse], ], ]: raise NotImplementedError() @@ -257,22 +314,19 @@ def list_monitored_resource_descriptors( @property def list_logs( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListLogsRequest], - typing.Union[ - logging.ListLogsResponse, typing.Awaitable[logging.ListLogsResponse] - ], + Union[logging.ListLogsResponse, Awaitable[logging.ListLogsResponse]], ]: raise NotImplementedError() @property def tail_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.TailLogEntriesRequest], - typing.Union[ - logging.TailLogEntriesResponse, - typing.Awaitable[logging.TailLogEntriesResponse], + Union[ + logging.TailLogEntriesResponse, Awaitable[logging.TailLogEntriesResponse] ], ]: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index b52d306f..5e5c1ad0 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -223,7 +223,7 @@ def grpc_channel(self) -> grpc.Channel: return self._grpc_channel @property - def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty.Empty]: + def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log. The log @@ -246,7 +246,7 @@ def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty.Empty]: self._stubs["delete_log"] = self.grpc_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log"] diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 0ba87029..1f33ad78 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport @@ -54,7 +51,7 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -229,7 +228,7 @@ def grpc_channel(self) -> aio.Channel: @property def delete_log( self, - ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log. The log @@ -252,7 +251,7 @@ def delete_log( self._stubs["delete_log"] = self.grpc_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log"] diff --git a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index c857ea03..f37e3931 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import MetricsServiceV2Client from .async_client import MetricsServiceV2AsyncClient diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 93a652b7..defd64a1 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,19 +20,17 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.api import metric_pb2 as metric # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport from .client import MetricsServiceV2Client @@ -50,31 +46,26 @@ class MetricsServiceV2AsyncClient: log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) - common_billing_account_path = staticmethod( MetricsServiceV2Client.common_billing_account_path ) parse_common_billing_account_path = staticmethod( MetricsServiceV2Client.parse_common_billing_account_path ) - common_folder_path = staticmethod(MetricsServiceV2Client.common_folder_path) parse_common_folder_path = staticmethod( MetricsServiceV2Client.parse_common_folder_path ) - common_organization_path = staticmethod( MetricsServiceV2Client.common_organization_path ) parse_common_organization_path = staticmethod( MetricsServiceV2Client.parse_common_organization_path ) - common_project_path = staticmethod(MetricsServiceV2Client.common_project_path) parse_common_project_path = staticmethod( MetricsServiceV2Client.parse_common_project_path ) - common_location_path = staticmethod(MetricsServiceV2Client.common_location_path) parse_common_location_path = staticmethod( MetricsServiceV2Client.parse_common_location_path @@ -82,7 +73,8 @@ class MetricsServiceV2AsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -97,7 +89,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -114,7 +106,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> MetricsServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: MetricsServiceV2Transport: The transport used by the client instance. 
@@ -128,12 +120,12 @@ def transport(self) -> MetricsServiceV2Transport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the metrics service v2 client. + """Instantiates the metrics service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -165,7 +157,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = MetricsServiceV2Client( credentials=credentials, transport=transport, @@ -198,7 +189,6 @@ async def list_log_metrics( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -227,7 +217,6 @@ async def list_log_metrics( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -240,9 +229,9 @@ async def list_log_metrics( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -292,7 +281,6 @@ async def get_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -328,7 +316,6 @@ async def get_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name @@ -341,9 +328,9 @@ async def get_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -401,7 +388,6 @@ async def create_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -437,7 +423,6 @@ async def create_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metric is not None: @@ -498,7 +483,6 @@ async def update_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -534,7 +518,6 @@ async def update_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if metric_name is not None: request.metric_name = metric_name if metric is not None: @@ -549,9 +532,9 @@ async def update_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -597,7 +580,6 @@ async def delete_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -618,7 +600,6 @@ async def delete_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name @@ -631,9 +612,9 @@ async def delete_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index 850236a5..6dcbcdfb 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,22 +21,20 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.api import metric_pb2 as metric # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -59,7 +55,7 @@ class MetricsServiceV2ClientMeta(type): _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[MetricsServiceV2Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -82,7 +78,8 @@ class MetricsServiceV2Client(metaclass=MetricsServiceV2ClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -116,7 +113,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -133,7 +131,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -152,29 +150,30 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> MetricsServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - MetricsServiceV2Transport: The transport used by the client instance. + MetricsServiceV2Transport: The transport used by the client + instance. 
""" return self._transport @staticmethod def log_metric_path(project: str, metric: str,) -> str: - """Return a fully-qualified log_metric string.""" + """Returns a fully-qualified log_metric string.""" return "projects/{project}/metrics/{metric}".format( project=project, metric=metric, ) @staticmethod def parse_log_metric_path(path: str) -> Dict[str, str]: - """Parse a log_metric path into its component segments.""" + """Parses a log_metric path into its component segments.""" m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -187,7 +186,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -198,7 +197,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -209,7 +208,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -220,7 +219,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -234,12 +233,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, MetricsServiceV2Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the metrics service v2 client. + """Instantiates the metrics service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -294,9 +293,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -308,12 +308,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -328,8 +330,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -369,7 +371,6 @@ def list_log_metrics( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -400,10 +401,8 @@ def list_log_metrics( # there are no flattened fields. if not isinstance(request, logging_metrics.ListLogMetricsRequest): request = logging_metrics.ListLogMetricsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -453,7 +452,6 @@ def get_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -491,10 +489,8 @@ def get_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.GetLogMetricRequest): request = logging_metrics.GetLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name @@ -552,7 +548,6 @@ def create_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -590,10 +585,8 @@ def create_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.CreateLogMetricRequest): request = logging_metrics.CreateLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metric is not None: @@ -650,7 +643,6 @@ def update_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -688,10 +680,8 @@ def update_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.UpdateLogMetricRequest): request = logging_metrics.UpdateLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if metric_name is not None: request.metric_name = metric_name if metric is not None: @@ -739,7 +729,6 @@ def delete_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -762,10 +751,8 @@ def delete_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.DeleteLogMetricRequest): request = logging_metrics.DeleteLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name diff --git a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 15134ac5..8ff178d2 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index f748403b..10ccb830 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index c6ae3da4..814f6259 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" @@ -48,21 +58,24 @@ class MetricsServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.write", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -71,7 +84,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -85,29 +98,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -118,9 +178,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -134,9 +194,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -153,9 +213,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -169,9 +229,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -183,11 +243,11 @@ def _prep_wrapped_messages(self, client_info): @property def list_log_metrics( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.ListLogMetricsRequest], - typing.Union[ + Union[ logging_metrics.ListLogMetricsResponse, - typing.Awaitable[logging_metrics.ListLogMetricsResponse], + Awaitable[logging_metrics.ListLogMetricsResponse], ], ]: raise NotImplementedError() @@ -195,42 +255,36 @@ def list_log_metrics( @property def get_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.GetLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def create_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.CreateLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def update_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.UpdateLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def delete_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.DeleteLogMetricRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index a9447ac2..1c9b3dde 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # 
Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -331,7 +331,7 @@ def update_log_metric( @property def delete_log_metric( self, - ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty.Empty]: + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. 
@@ -350,7 +350,7 @@ def delete_log_metric( self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log_metric"] diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 94017be9..62a0bf0f 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport @@ -54,7 +51,7 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -342,7 +341,7 @@ def update_log_metric( @property def delete_log_metric( self, - ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -361,7 +360,7 @@ def delete_log_metric( self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log_metric"] diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py index 9519c077..7d1cdd99 100644 --- a/google/cloud/logging_v2/types/__init__.py +++ b/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .log_entry import ( LogEntry, LogEntryOperation, diff --git a/google/cloud/logging_v2/types/log_entry.py b/google/cloud/logging_v2/types/log_entry.py index e63d6086..6c57b22d 100644 --- a/google/cloud/logging_v2/types/log_entry.py +++ b/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,16 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore -from google.logging.type import http_request_pb2 as glt_http_request # type: ignore -from google.logging.type import log_severity_pb2 as log_severity # type: ignore -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as gp_timestamp # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -34,7 +31,6 @@ class LogEntry(proto.Message): r"""An individual entry in a log. - Attributes: log_name (str): Required. The resource name of the log to which this log @@ -163,46 +159,31 @@ class LogEntry(proto.Message): associated with the log entry, if any. 
""" - log_name = proto.Field(proto.STRING, number=12) - + log_name = proto.Field(proto.STRING, number=12,) resource = proto.Field( - proto.MESSAGE, number=8, message=monitored_resource.MonitoredResource, + proto.MESSAGE, number=8, message=monitored_resource_pb2.MonitoredResource, ) - proto_payload = proto.Field( - proto.MESSAGE, number=2, oneof="payload", message=gp_any.Any, + proto.MESSAGE, number=2, oneof="payload", message=any_pb2.Any, ) - - text_payload = proto.Field(proto.STRING, number=3, oneof="payload") - + text_payload = proto.Field(proto.STRING, number=3, oneof="payload",) json_payload = proto.Field( - proto.MESSAGE, number=6, oneof="payload", message=struct.Struct, + proto.MESSAGE, number=6, oneof="payload", message=struct_pb2.Struct, ) - - timestamp = proto.Field(proto.MESSAGE, number=9, message=gp_timestamp.Timestamp,) - + timestamp = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) receive_timestamp = proto.Field( - proto.MESSAGE, number=24, message=gp_timestamp.Timestamp, + proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, ) - - severity = proto.Field(proto.ENUM, number=10, enum=log_severity.LogSeverity,) - - insert_id = proto.Field(proto.STRING, number=4) - + severity = proto.Field(proto.ENUM, number=10, enum=log_severity_pb2.LogSeverity,) + insert_id = proto.Field(proto.STRING, number=4,) http_request = proto.Field( - proto.MESSAGE, number=7, message=glt_http_request.HttpRequest, + proto.MESSAGE, number=7, message=http_request_pb2.HttpRequest, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=11) - + labels = proto.MapField(proto.STRING, proto.STRING, number=11,) operation = proto.Field(proto.MESSAGE, number=15, message="LogEntryOperation",) - - trace = proto.Field(proto.STRING, number=22) - - span_id = proto.Field(proto.STRING, number=27) - - trace_sampled = proto.Field(proto.BOOL, number=30) - + trace = proto.Field(proto.STRING, number=22,) + span_id = proto.Field(proto.STRING, number=27,) + trace_sampled = proto.Field(proto.BOOL, number=30,) source_location = proto.Field( proto.MESSAGE, number=23, message="LogEntrySourceLocation", ) @@ -230,13 +211,10 @@ class LogEntryOperation(proto.Message): last log entry in the operation. """ - id = proto.Field(proto.STRING, number=1) - - producer = proto.Field(proto.STRING, number=2) - - first = proto.Field(proto.BOOL, number=3) - - last = proto.Field(proto.BOOL, number=4) + id = proto.Field(proto.STRING, number=1,) + producer = proto.Field(proto.STRING, number=2,) + first = proto.Field(proto.BOOL, number=3,) + last = proto.Field(proto.BOOL, number=4,) class LogEntrySourceLocation(proto.Message): @@ -261,11 +239,9 @@ class LogEntrySourceLocation(proto.Message): (Go), ``function`` (Python). 
""" - file = proto.Field(proto.STRING, number=1) - - line = proto.Field(proto.INT64, number=2) - - function = proto.Field(proto.STRING, number=3) + file = proto.Field(proto.STRING, number=1,) + line = proto.Field(proto.INT64, number=2,) + function = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging.py b/google/cloud/logging_v2/types/logging.py index ca739c02..6d64b9a9 100644 --- a/google/cloud/logging_v2/types/logging.py +++ b/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry -from google.protobuf import duration_pb2 as duration # type: ignore -from google.rpc import status_pb2 as status # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore __protobuf__ = proto.module( @@ -45,7 +42,6 @@ class DeleteLogRequest(proto.Message): r"""The parameters to DeleteLog. - Attributes: log_name (str): Required. The resource name of the log to delete: @@ -64,12 +60,11 @@ class DeleteLogRequest(proto.Message): [LogEntry][google.logging.v2.LogEntry]. """ - log_name = proto.Field(proto.STRING, number=1) + log_name = proto.Field(proto.STRING, number=1,) class WriteLogEntriesRequest(proto.Message): r"""The parameters to WriteLogEntries. - Attributes: log_name (str): Optional. A default log resource name that is assigned to @@ -158,28 +153,22 @@ class WriteLogEntriesRequest(proto.Message): properly before sending valuable data. """ - log_name = proto.Field(proto.STRING, number=1) - + log_name = proto.Field(proto.STRING, number=1,) resource = proto.Field( - proto.MESSAGE, number=2, message=monitored_resource.MonitoredResource, + proto.MESSAGE, number=2, message=monitored_resource_pb2.MonitoredResource, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) - + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) entries = proto.RepeatedField(proto.MESSAGE, number=4, message=log_entry.LogEntry,) - - partial_success = proto.Field(proto.BOOL, number=5) - - dry_run = proto.Field(proto.BOOL, number=6) + partial_success = proto.Field(proto.BOOL, number=5,) + dry_run = proto.Field(proto.BOOL, number=6,) class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries.""" + r"""Result returned from WriteLogEntries. """ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. - Attributes: log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): When ``WriteLogEntriesRequest.partial_success`` is true, @@ -192,13 +181,12 @@ class WriteLogEntriesPartialErrors(proto.Message): """ log_entry_errors = proto.MapField( - proto.INT32, proto.MESSAGE, number=1, message=status.Status, + proto.INT32, proto.MESSAGE, number=1, message=status_pb2.Status, ) class ListLogEntriesRequest(proto.Message): r"""The parameters to ``ListLogEntries``. - Attributes: resource_names (Sequence[str]): Required. 
Names of one or more parent resources from which @@ -252,20 +240,15 @@ class ListLogEntriesRequest(proto.Message): should be identical to those in the previous call. """ - resource_names = proto.RepeatedField(proto.STRING, number=8) - - filter = proto.Field(proto.STRING, number=2) - - order_by = proto.Field(proto.STRING, number=3) - - page_size = proto.Field(proto.INT32, number=4) - - page_token = proto.Field(proto.STRING, number=5) + resource_names = proto.RepeatedField(proto.STRING, number=8,) + filter = proto.Field(proto.STRING, number=2,) + order_by = proto.Field(proto.STRING, number=3,) + page_size = proto.Field(proto.INT32, number=4,) + page_token = proto.Field(proto.STRING, number=5,) class ListLogEntriesResponse(proto.Message): r"""Result returned from ``ListLogEntries``. - Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. If ``entries`` is empty, @@ -293,13 +276,11 @@ def raw_page(self): return self entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListMonitoredResourceDescriptorsRequest(proto.Message): r"""The parameters to ListMonitoredResourceDescriptors - Attributes: page_size (int): Optional. The maximum number of results to return from this @@ -314,14 +295,12 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): should be identical to those in the previous call. """ - page_size = proto.Field(proto.INT32, number=1) - - page_token = proto.Field(proto.STRING, number=2) + page_size = proto.Field(proto.INT32, number=1,) + page_token = proto.Field(proto.STRING, number=2,) class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""Result returned from ListMonitoredResourceDescriptors. - Attributes: resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): A list of resource descriptors. @@ -337,15 +316,15 @@ def raw_page(self): return self resource_descriptors = proto.RepeatedField( - proto.MESSAGE, number=1, message=monitored_resource.MonitoredResourceDescriptor, + proto.MESSAGE, + number=1, + message=monitored_resource_pb2.MonitoredResourceDescriptor, ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListLogsRequest(proto.Message): r"""The parameters to ListLogs. - Attributes: parent (str): Required. The resource name that owns the logs: @@ -379,18 +358,14 @@ class ListLogsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - resource_names = proto.RepeatedField(proto.STRING, number=8) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + resource_names = proto.RepeatedField(proto.STRING, number=8,) class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. - Attributes: log_names (Sequence[str]): A list of log names. 
For example, @@ -407,14 +382,12 @@ class ListLogsResponse(proto.Message): def raw_page(self): return self - log_names = proto.RepeatedField(proto.STRING, number=3) - - next_page_token = proto.Field(proto.STRING, number=2) + log_names = proto.RepeatedField(proto.STRING, number=3,) + next_page_token = proto.Field(proto.STRING, number=2,) class TailLogEntriesRequest(proto.Message): r"""The parameters to ``TailLogEntries``. - Attributes: resource_names (Sequence[str]): Required. Name of a parent resource from which to retrieve @@ -451,16 +424,13 @@ class TailLogEntriesRequest(proto.Message): milliseconds. """ - resource_names = proto.RepeatedField(proto.STRING, number=1) - - filter = proto.Field(proto.STRING, number=2) - - buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + resource_names = proto.RepeatedField(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=2,) + buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration,) class TailLogEntriesResponse(proto.Message): r"""Result returned from ``TailLogEntries``. - Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will @@ -480,7 +450,6 @@ class TailLogEntriesResponse(proto.Message): class SuppressionInfo(proto.Message): r"""Information about entries that were omitted from the session. - Attributes: reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): The reason that entries were omitted from the @@ -499,11 +468,9 @@ class Reason(proto.Enum): reason = proto.Field( proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) - - suppressed_count = proto.Field(proto.INT32, number=2) + suppressed_count = proto.Field(proto.INT32, number=2,) entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) - suppression_info = proto.RepeatedField( proto.MESSAGE, number=2, message=SuppressionInfo, ) diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py index 0d1f896e..9b628073 100644 --- a/google/cloud/logging_v2/types/logging_config.py +++ b/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -72,7 +69,6 @@ class LifecycleState(proto.Enum): class LogBucket(proto.Message): r"""Describes a repository of logs. - Attributes: name (str): The resource name of the bucket. For example: @@ -107,24 +103,17 @@ class LogBucket(proto.Message): Output only. The bucket lifecycle state. 
""" - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - retention_days = proto.Field(proto.INT32, number=11) - - locked = proto.Field(proto.BOOL, number=9) - + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + retention_days = proto.Field(proto.INT32, number=11,) + locked = proto.Field(proto.BOOL, number=9,) lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) class LogView(proto.Message): r"""Describes a view over logs in a bucket. - Attributes: name (str): The resource name of the view. @@ -148,15 +137,11 @@ class LogView(proto.Message): resource.type = "gce_instance" AND LOG_ID("stdout") """ - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - filter = proto.Field(proto.STRING, number=7) + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + filter = proto.Field(proto.STRING, number=7,) class LogSink(proto.Message): @@ -267,31 +252,24 @@ class VersionFormat(proto.Enum): V2 = 1 V1 = 2 - name = proto.Field(proto.STRING, number=1) - - destination = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=5) - - description = proto.Field(proto.STRING, number=18) - - disabled = proto.Field(proto.BOOL, number=19) - + name = proto.Field(proto.STRING, number=1,) + destination = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=5,) + description = proto.Field(proto.STRING, number=18,) + disabled = proto.Field(proto.BOOL, number=19,) exclusions = proto.RepeatedField(proto.MESSAGE, number=16, message="LogExclusion",) - output_version_format = proto.Field(proto.ENUM, number=6, enum=VersionFormat,) - - writer_identity = proto.Field(proto.STRING, number=8) - - include_children = proto.Field(proto.BOOL, number=9) - + writer_identity = proto.Field(proto.STRING, number=8,) + include_children = proto.Field(proto.BOOL, number=9,) bigquery_options = proto.Field( proto.MESSAGE, number=12, oneof="options", message="BigQueryOptions", ) - - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) class BigQueryOptions(proto.Message): @@ -319,14 +297,12 @@ class BigQueryOptions(proto.Message): will have this field set to false. 
""" - use_partitioned_tables = proto.Field(proto.BOOL, number=1) - - uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3) + use_partitioned_tables = proto.Field(proto.BOOL, number=1,) + uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3,) class ListBucketsRequest(proto.Message): r"""The parameters to ``ListBuckets``. - Attributes: parent (str): Required. The parent resource whose buckets are to be @@ -355,16 +331,13 @@ class ListBucketsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListBucketsResponse(proto.Message): r"""The response from ListBuckets. - Attributes: buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): A list of buckets. @@ -380,13 +353,11 @@ def raw_page(self): return self buckets = proto.RepeatedField(proto.MESSAGE, number=1, message="LogBucket",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateBucketRequest(proto.Message): r"""The parameters to ``CreateBucket``. - Attributes: parent (str): Required. The resource in which to create the bucket: @@ -408,16 +379,13 @@ class CreateBucketRequest(proto.Message): name field in the bucket is ignored. """ - parent = proto.Field(proto.STRING, number=1) - - bucket_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + bucket_id = proto.Field(proto.STRING, number=2,) bucket = proto.Field(proto.MESSAGE, number=3, message="LogBucket",) class UpdateBucketRequest(proto.Message): r"""The parameters to ``UpdateBucket``. - Attributes: name (str): Required. The full resource name of the bucket to update. @@ -448,16 +416,15 @@ class UpdateBucketRequest(proto.Message): Example: ``updateMask=retention_days``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) bucket = proto.Field(proto.MESSAGE, number=2, message="LogBucket",) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class GetBucketRequest(proto.Message): r"""The parameters to ``GetBucket``. - Attributes: name (str): Required. The resource name of the bucket: @@ -473,12 +440,11 @@ class GetBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteBucketRequest(proto.Message): r"""The parameters to ``DeleteBucket``. - Attributes: name (str): Required. The full resource name of the bucket to delete. @@ -494,12 +460,11 @@ class DeleteBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UndeleteBucketRequest(proto.Message): r"""The parameters to ``UndeleteBucket``. - Attributes: name (str): Required. The full resource name of the bucket to undelete. @@ -515,12 +480,11 @@ class UndeleteBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListViewsRequest(proto.Message): r"""The parameters to ``ListViews``. - Attributes: parent (str): Required. The bucket whose views are to be listed: @@ -541,16 +505,13 @@ class ListViewsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListViewsResponse(proto.Message): r"""The response from ListViews. - Attributes: views (Sequence[google.cloud.logging_v2.types.LogView]): A list of views. @@ -566,13 +527,11 @@ def raw_page(self): return self views = proto.RepeatedField(proto.MESSAGE, number=1, message="LogView",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateViewRequest(proto.Message): r"""The parameters to ``CreateView``. - Attributes: parent (str): Required. The bucket in which to create the view @@ -589,16 +548,13 @@ class CreateViewRequest(proto.Message): Required. The new view. """ - parent = proto.Field(proto.STRING, number=1) - - view_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + view_id = proto.Field(proto.STRING, number=2,) view = proto.Field(proto.MESSAGE, number=3, message="LogView",) class UpdateViewRequest(proto.Message): r"""The parameters to ``UpdateView``. - Attributes: name (str): Required. The full resource name of the view to update @@ -623,16 +579,15 @@ class UpdateViewRequest(proto.Message): Example: ``updateMask=filter``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) view = proto.Field(proto.MESSAGE, number=2, message="LogView",) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class GetViewRequest(proto.Message): r"""The parameters to ``GetView``. - Attributes: name (str): Required. The resource name of the policy: @@ -645,12 +600,11 @@ class GetViewRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteViewRequest(proto.Message): r"""The parameters to ``DeleteView``. - Attributes: name (str): Required. The full resource name of the view to delete: @@ -663,12 +617,11 @@ class DeleteViewRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListSinksRequest(proto.Message): r"""The parameters to ``ListSinks``. - Attributes: parent (str): Required. The parent resource whose sinks are to be listed: @@ -692,16 +645,13 @@ class ListSinksRequest(proto.Message): results might be available. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListSinksResponse(proto.Message): r"""Result returned from ``ListSinks``. - Attributes: sinks (Sequence[google.cloud.logging_v2.types.LogSink]): A list of sinks. @@ -717,13 +667,11 @@ def raw_page(self): return self sinks = proto.RepeatedField(proto.MESSAGE, number=1, message="LogSink",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetSinkRequest(proto.Message): r"""The parameters to ``GetSink``. - Attributes: sink_name (str): Required. The resource name of the sink: @@ -738,12 +686,11 @@ class GetSinkRequest(proto.Message): Example: ``"projects/my-project-id/sinks/my-sink-id"``. """ - sink_name = proto.Field(proto.STRING, number=1) + sink_name = proto.Field(proto.STRING, number=1,) class CreateSinkRequest(proto.Message): r"""The parameters to ``CreateSink``. - Attributes: parent (str): Required. The resource in which to create the sink: @@ -777,16 +724,13 @@ class CreateSinkRequest(proto.Message): [LogSink][google.logging.v2.LogSink]. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - - unique_writer_identity = proto.Field(proto.BOOL, number=3) + unique_writer_identity = proto.Field(proto.BOOL, number=3,) class UpdateSinkRequest(proto.Message): r"""The parameters to ``UpdateSink``. - Attributes: sink_name (str): Required. The full resource name of the sink to update, @@ -837,18 +781,16 @@ class UpdateSinkRequest(proto.Message): Example: ``updateMask=filter``. """ - sink_name = proto.Field(proto.STRING, number=1) - + sink_name = proto.Field(proto.STRING, number=1,) sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - - unique_writer_identity = proto.Field(proto.BOOL, number=3) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + unique_writer_identity = proto.Field(proto.BOOL, number=3,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class DeleteSinkRequest(proto.Message): r"""The parameters to ``DeleteSink``. - Attributes: sink_name (str): Required. The full resource name of the sink to delete, @@ -864,7 +806,7 @@ class DeleteSinkRequest(proto.Message): Example: ``"projects/my-project-id/sinks/my-sink-id"``. """ - sink_name = proto.Field(proto.STRING, number=1) + sink_name = proto.Field(proto.STRING, number=1,) class LogExclusion(proto.Message): @@ -913,22 +855,16 @@ class LogExclusion(proto.Message): exclusions. 
""" - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - filter = proto.Field(proto.STRING, number=3) - - disabled = proto.Field(proto.BOOL, number=4) - - create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + filter = proto.Field(proto.STRING, number=3,) + disabled = proto.Field(proto.BOOL, number=4,) + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) class ListExclusionsRequest(proto.Message): r"""The parameters to ``ListExclusions``. - Attributes: parent (str): Required. The parent resource whose exclusions are to be @@ -953,16 +889,13 @@ class ListExclusionsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListExclusionsResponse(proto.Message): r"""Result returned from ``ListExclusions``. - Attributes: exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): A list of exclusions. @@ -978,13 +911,11 @@ def raw_page(self): return self exclusions = proto.RepeatedField(proto.MESSAGE, number=1, message="LogExclusion",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetExclusionRequest(proto.Message): r"""The parameters to ``GetExclusion``. - Attributes: name (str): Required. The resource name of an existing exclusion: @@ -1000,12 +931,11 @@ class GetExclusionRequest(proto.Message): ``"projects/my-project-id/exclusions/my-exclusion-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateExclusionRequest(proto.Message): r"""The parameters to ``CreateExclusion``. - Attributes: parent (str): Required. The parent resource in which to create the @@ -1026,14 +956,12 @@ class CreateExclusionRequest(proto.Message): resource. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) class UpdateExclusionRequest(proto.Message): r"""The parameters to ``UpdateExclusion``. - Attributes: name (str): Required. The resource name of the exclusion to update: @@ -1063,16 +991,15 @@ class UpdateExclusionRequest(proto.Message): ``"filter,description"``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class DeleteExclusionRequest(proto.Message): r"""The parameters to ``DeleteExclusion``. - Attributes: name (str): Required. The resource name of an existing exclusion to @@ -1089,7 +1016,7 @@ class DeleteExclusionRequest(proto.Message): ``"projects/my-project-id/exclusions/my-exclusion-id"``. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class GetCmekSettingsRequest(proto.Message): @@ -1118,7 +1045,7 @@ class GetCmekSettingsRequest(proto.Message): applies to all projects and folders in the GCP organization. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateCmekSettingsRequest(proto.Message): @@ -1163,11 +1090,11 @@ class UpdateCmekSettingsRequest(proto.Message): Example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) cmek_settings = proto.Field(proto.MESSAGE, number=2, message="CmekSettings",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class CmekSettings(proto.Message): @@ -1232,11 +1159,9 @@ class CmekSettings(proto.Message): for more information. """ - name = proto.Field(proto.STRING, number=1) - - kms_key_name = proto.Field(proto.STRING, number=2) - - service_account_id = proto.Field(proto.STRING, number=3) + name = proto.Field(proto.STRING, number=1,) + kms_key_name = proto.Field(proto.STRING, number=2,) + service_account_id = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging_metrics.py b/google/cloud/logging_v2/types/logging_metrics.py index c2a8a600..4b39650f 100644 --- a/google/cloud/logging_v2/types/logging_metrics.py +++ b/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -170,34 +167,26 @@ class ApiVersion(proto.Enum): V2 = 0 V1 = 1 - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - filter = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + filter = proto.Field(proto.STRING, number=3,) metric_descriptor = proto.Field( - proto.MESSAGE, number=5, message=ga_metric.MetricDescriptor, + proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, ) - - value_extractor = proto.Field(proto.STRING, number=6) - - label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7) - + value_extractor = proto.Field(proto.STRING, number=6,) + label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7,) bucket_options = proto.Field( - proto.MESSAGE, number=8, message=distribution.Distribution.BucketOptions, + proto.MESSAGE, number=8, message=distribution_pb2.Distribution.BucketOptions, + ) + create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) + update_time = proto.Field( + proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp, ) - - create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) - version = proto.Field(proto.ENUM, number=4, enum=ApiVersion,) class ListLogMetricsRequest(proto.Message): r"""The parameters to ListLogMetrics. - Attributes: parent (str): Required. The name of the project containing the metrics: @@ -218,16 +207,13 @@ class ListLogMetricsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListLogMetricsResponse(proto.Message): r"""Result returned from ListLogMetrics. - Attributes: metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): A list of logs-based metrics. @@ -243,13 +229,11 @@ def raw_page(self): return self metrics = proto.RepeatedField(proto.MESSAGE, number=1, message="LogMetric",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetLogMetricRequest(proto.Message): r"""The parameters to GetLogMetric. - Attributes: metric_name (str): Required. The resource name of the desired metric: @@ -259,12 +243,11 @@ class GetLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1) + metric_name = proto.Field(proto.STRING, number=1,) class CreateLogMetricRequest(proto.Message): r"""The parameters to CreateLogMetric. - Attributes: parent (str): Required. The resource name of the project in which to @@ -280,14 +263,12 @@ class CreateLogMetricRequest(proto.Message): must not have an identifier that already exists. 
""" - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) class UpdateLogMetricRequest(proto.Message): r"""The parameters to UpdateLogMetric. - Attributes: metric_name (str): Required. The resource name of the metric to update: @@ -304,14 +285,12 @@ class UpdateLogMetricRequest(proto.Message): Required. The updated metric. """ - metric_name = proto.Field(proto.STRING, number=1) - + metric_name = proto.Field(proto.STRING, number=1,) metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) class DeleteLogMetricRequest(proto.Message): r"""The parameters to DeleteLogMetric. - Attributes: metric_name (str): Required. The resource name of the metric to delete: @@ -321,7 +300,7 @@ class DeleteLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1) + metric_name = proto.Field(proto.STRING, number=1,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/__init__.py b/tests/__init__.py index e69de29b..4de65971 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index df379f1e..4de65971 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,4 +1,5 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,3 +12,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/logging_v2/__init__.py b/tests/unit/gapic/logging_v2/__init__.py index 42ffdf2b..4de65971 100644 --- a/tests/unit/gapic/logging_v2/__init__.py +++ b/tests/unit/gapic/logging_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index d6a2f398..8be1ee06 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ( ConfigServiceV2AsyncClient, @@ -38,10 +37,40 @@ from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports +from google.cloud.logging_v2.services.config_service_v2.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.logging_v2.services.config_service_v2.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.logging_v2.types import logging_config from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -92,7 +121,7 @@ def test__get_default_mtls_endpoint(): "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] ) def test_config_service_v2_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -109,7 +138,7 @@ def test_config_service_v2_client_from_service_account_info(client_class): "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] ) def test_config_service_v2_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -162,7 +191,7 @@ def test_config_service_v2_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -460,7 +489,7 @@ def test_list_buckets( transport: str = "grpc", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -473,19 +502,15 @@ def test_list_buckets( call.return_value = logging_config.ListBucketsResponse( next_page_token="next_page_token_value", ) - response = client.list_buckets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == "next_page_token_value" @@ -497,7 +522,7 @@ def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
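An illustrative aside, not part of the patch: the skipif markers introduced above are intended to be applied as decorators so a test only runs against a compatible google-auth or google-api-core release. A minimal sketch, reusing the marker names and client imports from this test module; the test body itself is hypothetical.

# Hypothetical usage of the version-gating markers defined above (sketch only).
@requires_google_auth_gte_1_25_0
def test_client_with_modern_google_auth():
    # Skipped automatically when the installed google-auth is older than 1.25.0.
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert client.transport is not None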
@@ -505,7 +530,6 @@ def test_list_buckets_empty_call(): client.list_buckets() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() @@ -514,7 +538,7 @@ async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -527,18 +551,15 @@ async def test_list_buckets_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListBucketsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -548,17 +569,17 @@ async def test_list_buckets_async_from_dict(): def test_list_buckets_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: call.return_value = logging_config.ListBucketsResponse() - client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -573,11 +594,14 @@ def test_list_buckets_field_headers(): @pytest.mark.asyncio async def test_list_buckets_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -585,7 +609,6 @@ async def test_list_buckets_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListBucketsResponse() ) - await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -599,13 +622,12 @@ async def test_list_buckets_field_headers_async(): def test_list_buckets_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_buckets(parent="parent_value",) @@ -614,12 +636,11 @@ def test_list_buckets_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_buckets_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -631,7 +652,9 @@ def test_list_buckets_flattened_error(): @pytest.mark.asyncio async def test_list_buckets_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -649,13 +672,14 @@ async def test_list_buckets_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -666,7 +690,7 @@ async def test_list_buckets_flattened_error_async(): def test_list_buckets_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -704,7 +728,7 @@ def test_list_buckets_pager(): def test_list_buckets_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -734,7 +758,9 @@ def test_list_buckets_pages(): @pytest.mark.asyncio async def test_list_buckets_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -771,7 +797,9 @@ async def test_list_buckets_async_pager(): @pytest.mark.asyncio async def test_list_buckets_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -807,7 +835,7 @@ def test_get_bucket( transport: str = "grpc", request_type=logging_config.GetBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -824,27 +852,19 @@ def test_get_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.get_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -856,7 +876,7 @@ def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -864,7 +884,6 @@ def test_get_bucket_empty_call(): client.get_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() @@ -873,7 +892,7 @@ async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -892,26 +911,19 @@ async def test_get_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -921,17 +933,17 @@ async def test_get_bucket_async_from_dict(): def test_get_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.get_bucket(request) # Establish that the underlying gRPC stub method was called. 
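An illustrative aside, not part of the patch: the test hunks above all follow the same mock-the-transport pattern after the rename from ``credentials`` to ``ga_credentials``. A condensed, self-contained sketch of that pattern under the same imports the module uses; the field values are placeholders.

import mock
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config

client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())
# Patch the transport-level callable and hand back a canned response.
with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
    call.return_value = logging_config.LogBucket(name="name_value")
    response = client.get_bucket(logging_config.GetBucketRequest(name="name/value"))
# The client forwards the request and returns the canned bucket.
_, args, _ = call.mock_calls[0]
assert args[0] == logging_config.GetBucketRequest(name="name/value")
assert response.name == "name_value"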
@@ -946,11 +958,14 @@ def test_get_bucket_field_headers(): @pytest.mark.asyncio async def test_get_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -958,7 +973,6 @@ async def test_get_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -975,7 +989,7 @@ def test_create_bucket( transport: str = "grpc", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -992,27 +1006,19 @@ def test_create_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1024,7 +1030,7 @@ def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1032,7 +1038,6 @@ def test_create_bucket_empty_call(): client.create_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() @@ -1041,7 +1046,7 @@ async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1060,26 +1065,19 @@ async def test_create_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1089,17 +1087,17 @@ async def test_create_bucket_async_from_dict(): def test_create_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1114,11 +1112,14 @@ def test_create_bucket_field_headers(): @pytest.mark.asyncio async def test_create_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1126,7 +1127,6 @@ async def test_create_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1143,7 +1143,7 @@ def test_update_bucket( transport: str = "grpc", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1160,27 +1160,19 @@ def test_update_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1192,7 +1184,7 @@ def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1200,7 +1192,6 @@ def test_update_bucket_empty_call(): client.update_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() @@ -1209,7 +1200,7 @@ async def test_update_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1228,26 +1219,19 @@ async def test_update_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1257,17 +1241,17 @@ async def test_update_bucket_async_from_dict(): def test_update_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1282,11 +1266,14 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio async def test_update_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1294,7 +1281,6 @@ async def test_update_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1311,7 +1297,7 @@ def test_delete_bucket( transport: str = "grpc", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1322,13 +1308,11 @@ def test_delete_bucket( with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None - response = client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1343,7 +1327,7 @@ def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1351,7 +1335,6 @@ def test_delete_bucket_empty_call(): client.delete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() @@ -1360,7 +1343,7 @@ async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1371,13 +1354,11 @@ async def test_delete_bucket_async( with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1390,17 +1371,17 @@ async def test_delete_bucket_async_from_dict(): def test_delete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = None - client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1415,17 +1396,19 @@ def test_delete_bucket_field_headers(): @pytest.mark.asyncio async def test_delete_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. 
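An illustrative aside, not part of the patch: for the async client, the hunks above wrap the fake result in ``grpc_helpers_async.FakeUnaryUnaryCall`` so the mocked unary RPC can be awaited. A minimal sketch reusing this module's imports; the test name is hypothetical, and the closing assertion reflects the assumption that DeleteBucket surfaces an empty response as None.

@pytest.mark.asyncio
async def test_delete_bucket_async_sketch():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
        # FakeUnaryUnaryCall(None) lets the client await the mocked unary RPC.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_bucket(
            logging_config.DeleteBucketRequest(name="name/value")
        )
    # Assumed: an Empty-returning RPC comes back as None from the client.
    assert response is None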
@@ -1442,7 +1425,7 @@ def test_undelete_bucket( transport: str = "grpc", request_type=logging_config.UndeleteBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1453,13 +1436,11 @@ def test_undelete_bucket( with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1474,7 +1455,7 @@ def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1482,7 +1463,6 @@ def test_undelete_bucket_empty_call(): client.undelete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() @@ -1491,7 +1471,7 @@ async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1502,13 +1482,11 @@ async def test_undelete_bucket_async( with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1521,17 +1499,17 @@ async def test_undelete_bucket_async_from_dict(): def test_undelete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = None - client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. 
@@ -1546,17 +1524,19 @@ def test_undelete_bucket_field_headers(): @pytest.mark.asyncio async def test_undelete_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1573,7 +1553,7 @@ def test_list_views( transport: str = "grpc", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1586,19 +1566,15 @@ def test_list_views( call.return_value = logging_config.ListViewsResponse( next_page_token="next_page_token_value", ) - response = client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == "next_page_token_value" @@ -1610,7 +1586,7 @@ def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1618,7 +1594,6 @@ def test_list_views_empty_call(): client.list_views() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() @@ -1627,7 +1602,7 @@ async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1640,18 +1615,15 @@ async def test_list_views_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListViewsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1661,17 +1633,17 @@ async def test_list_views_async_from_dict(): def test_list_views_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = logging_config.ListViewsResponse() - client.list_views(request) # Establish that the underlying gRPC stub method was called. @@ -1686,11 +1658,14 @@ def test_list_views_field_headers(): @pytest.mark.asyncio async def test_list_views_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1698,7 +1673,6 @@ async def test_list_views_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListViewsResponse() ) - await client.list_views(request) # Establish that the underlying gRPC stub method was called. @@ -1712,13 +1686,12 @@ async def test_list_views_field_headers_async(): def test_list_views_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_views(parent="parent_value",) @@ -1727,12 +1700,11 @@ def test_list_views_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_views_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1744,7 +1716,9 @@ def test_list_views_flattened_error(): @pytest.mark.asyncio async def test_list_views_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1762,13 +1736,14 @@ async def test_list_views_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_views_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1779,7 +1754,7 @@ async def test_list_views_flattened_error_async(): def test_list_views_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1817,7 +1792,7 @@ def test_list_views_pager(): def test_list_views_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1847,7 +1822,9 @@ def test_list_views_pages(): @pytest.mark.asyncio async def test_list_views_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1884,7 +1861,9 @@ async def test_list_views_async_pager(): @pytest.mark.asyncio async def test_list_views_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1918,7 +1897,7 @@ async def test_list_views_async_pages(): def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRequest): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1931,23 +1910,17 @@ def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRe call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -1959,7 +1932,7 @@ def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
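The list_views hunks keep the pager assertions (ListViewsPager, next_page_token) while dropping the blank lines between them. Roughly, the sync pager check looks like the sketch below; it mirrors the mocked transport calls in these tests rather than any live RPC:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.logging_v2.services.config_service_v2 import (
        ConfigServiceV2Client,
        pagers,
    )
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())

    # Fake the transport-level call so no request leaves the process.
    with mock.patch.object(type(client.transport.list_views), "__call__") as call:
        call.return_value = logging_config.ListViewsResponse(
            next_page_token="next_page_token_value",
        )
        response = client.list_views(parent="parent_value")

    # The client wraps the raw response in a pager and proxies its fields.
    assert isinstance(response, pagers.ListViewsPager)
    assert response.next_page_token == "next_page_token_value"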
@@ -1967,7 +1940,6 @@ def test_get_view_empty_call(): client.get_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() @@ -1976,7 +1948,7 @@ async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1993,22 +1965,17 @@ async def test_get_view_async( filter="filter_value", ) ) - response = await client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2018,17 +1985,17 @@ async def test_get_view_async_from_dict(): def test_get_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = logging_config.LogView() - client.get_view(request) # Establish that the underlying gRPC stub method was called. @@ -2043,11 +2010,14 @@ def test_get_view_field_headers(): @pytest.mark.asyncio async def test_get_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2055,7 +2025,6 @@ async def test_get_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.get_view(request) # Establish that the underlying gRPC stub method was called. @@ -2072,7 +2041,7 @@ def test_create_view( transport: str = "grpc", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2085,23 +2054,17 @@ def test_create_view( call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2113,7 +2076,7 @@ def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2121,7 +2084,6 @@ def test_create_view_empty_call(): client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() @@ -2130,7 +2092,7 @@ async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2147,22 +2109,17 @@ async def test_create_view_async( filter="filter_value", ) ) - response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2172,17 +2129,17 @@ async def test_create_view_async_from_dict(): def test_create_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = logging_config.LogView() - client.create_view(request) # Establish that the underlying gRPC stub method was called. @@ -2197,11 +2154,14 @@ def test_create_view_field_headers(): @pytest.mark.asyncio async def test_create_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2209,7 +2169,6 @@ async def test_create_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.create_view(request) # Establish that the underlying gRPC stub method was called. 
@@ -2226,7 +2185,7 @@ def test_update_view( transport: str = "grpc", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2239,23 +2198,17 @@ def test_update_view( call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2267,7 +2220,7 @@ def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2275,7 +2228,6 @@ def test_update_view_empty_call(): client.update_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() @@ -2284,7 +2236,7 @@ async def test_update_view_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2301,22 +2253,17 @@ async def test_update_view_async( filter="filter_value", ) ) - response = await client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2326,17 +2273,17 @@ async def test_update_view_async_from_dict(): def test_update_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = logging_config.LogView() - client.update_view(request) # Establish that the underlying gRPC stub method was called. 
@@ -2351,11 +2298,14 @@ def test_update_view_field_headers(): @pytest.mark.asyncio async def test_update_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2363,7 +2313,6 @@ async def test_update_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.update_view(request) # Establish that the underlying gRPC stub method was called. @@ -2380,7 +2329,7 @@ def test_delete_view( transport: str = "grpc", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2391,13 +2340,11 @@ def test_delete_view( with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() # Establish that the response is the type that we expect. @@ -2412,7 +2359,7 @@ def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2420,7 +2367,6 @@ def test_delete_view_empty_call(): client.delete_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() @@ -2429,7 +2375,7 @@ async def test_delete_view_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2440,13 +2386,11 @@ async def test_delete_view_async( with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() # Establish that the response is the type that we expect. 
@@ -2459,17 +2403,17 @@ async def test_delete_view_async_from_dict(): def test_delete_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = None - client.delete_view(request) # Establish that the underlying gRPC stub method was called. @@ -2484,17 +2428,19 @@ def test_delete_view_field_headers(): @pytest.mark.asyncio async def test_delete_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view(request) # Establish that the underlying gRPC stub method was called. @@ -2511,7 +2457,7 @@ def test_list_sinks( transport: str = "grpc", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2524,19 +2470,15 @@ def test_list_sinks( call.return_value = logging_config.ListSinksResponse( next_page_token="next_page_token_value", ) - response = client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == "next_page_token_value" @@ -2548,7 +2490,7 @@ def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
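The field-header tests now set the routed field (request.name, request.parent, request.sink_name) to a non-empty value before invoking the RPC, instead of leaving it implicit. A condensed sketch of that flow for list_sinks, built only from the pieces visible in these hunks (the metadata assertion itself lives in unchanged context and is omitted here):

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())

    # Populate the field that is routed as an HTTP/1.1 URI parameter.
    request = logging_config.ListSinksRequest()
    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
        call.return_value = logging_config.ListSinksResponse()
        client.list_sinks(request)

    # The mocked stub received the populated request object.
    _, args, _ = call.mock_calls[0]
    assert args[0] == request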
@@ -2556,7 +2498,6 @@ def test_list_sinks_empty_call(): client.list_sinks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() @@ -2565,7 +2506,7 @@ async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2578,18 +2519,15 @@ async def test_list_sinks_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListSinksResponse(next_page_token="next_page_token_value",) ) - response = await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -2599,17 +2537,17 @@ async def test_list_sinks_async_from_dict(): def test_list_sinks_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = logging_config.ListSinksResponse() - client.list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -2624,11 +2562,14 @@ def test_list_sinks_field_headers(): @pytest.mark.asyncio async def test_list_sinks_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2636,7 +2577,6 @@ async def test_list_sinks_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListSinksResponse() ) - await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -2650,13 +2590,12 @@ async def test_list_sinks_field_headers_async(): def test_list_sinks_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_sinks(parent="parent_value",) @@ -2665,12 +2604,11 @@ def test_list_sinks_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_sinks_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2682,7 +2620,9 @@ def test_list_sinks_flattened_error(): @pytest.mark.asyncio async def test_list_sinks_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2700,13 +2640,14 @@ async def test_list_sinks_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2717,7 +2658,7 @@ async def test_list_sinks_flattened_error_async(): def test_list_sinks_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2755,7 +2696,7 @@ def test_list_sinks_pager(): def test_list_sinks_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2785,7 +2726,9 @@ def test_list_sinks_pages(): @pytest.mark.asyncio async def test_list_sinks_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2822,7 +2765,9 @@ async def test_list_sinks_async_pager(): @pytest.mark.asyncio async def test_list_sinks_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2856,7 +2801,7 @@ async def test_list_sinks_async_pages(): def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRequest): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2879,33 +2824,22 @@ def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRe use_partitioned_tables=True ), ) - response = client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -2917,7 +2851,7 @@ def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2925,7 +2859,6 @@ def test_get_sink_empty_call(): client.get_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() @@ -2934,7 +2867,7 @@ async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2956,32 +2889,22 @@ async def test_get_sink_async( include_children=True, ) ) - response = await client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -2991,17 +2914,17 @@ async def test_get_sink_async_from_dict(): def test_get_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = logging_config.GetSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3016,11 +2939,14 @@ def test_get_sink_field_headers(): @pytest.mark.asyncio async def test_get_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3028,7 +2954,6 @@ async def test_get_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3042,13 +2967,12 @@ async def test_get_sink_field_headers_async(): def test_get_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_sink(sink_name="sink_name_value",) @@ -3057,12 +2981,11 @@ def test_get_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" def test_get_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3074,7 +2997,9 @@ def test_get_sink_flattened_error(): @pytest.mark.asyncio async def test_get_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3092,13 +3017,14 @@ async def test_get_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
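As the flattened_error hunks note, mixing a request object with flattened keyword arguments is rejected by the client. Illustratively (the specific ValueError is an assumption; the hunks only say the combination "is an error"):

    import pytest

    from google.auth import credentials as ga_credentials
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())

    # Passing both a request object and a flattened field is rejected
    # before any transport call is made.
    with pytest.raises(ValueError):
        client.get_sink(
            logging_config.GetSinkRequest(), sink_name="sink_name_value",
        )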
@@ -3112,7 +3038,7 @@ def test_create_sink( transport: str = "grpc", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3135,33 +3061,22 @@ def test_create_sink( use_partitioned_tables=True ), ) - response = client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3173,7 +3088,7 @@ def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3181,7 +3096,6 @@ def test_create_sink_empty_call(): client.create_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() @@ -3190,7 +3104,7 @@ async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3212,32 +3126,22 @@ async def test_create_sink_async( include_children=True, ) ) - response = await client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3247,17 +3151,17 @@ async def test_create_sink_async_from_dict(): def test_create_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = logging_config.CreateSinkRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3272,11 +3176,14 @@ def test_create_sink_field_headers(): @pytest.mark.asyncio async def test_create_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3284,7 +3191,6 @@ async def test_create_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3298,13 +3204,12 @@ async def test_create_sink_field_headers_async(): def test_create_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_sink( @@ -3315,14 +3220,12 @@ def test_create_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") def test_create_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3336,7 +3239,9 @@ def test_create_sink_flattened_error(): @pytest.mark.asyncio async def test_create_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3356,15 +3261,15 @@ async def test_create_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
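The update_sink hunks below also swap the old `field_mask` alias for the protobuf well-known-types module `field_mask_pb2` when building the flattened update_mask argument. A small sketch of that call shape, again against a mocked transport as in the surrounding tests:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.protobuf import field_mask_pb2
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config

    client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials())

    with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
        call.return_value = logging_config.LogSink()
        client.update_sink(
            sink_name="sink_name_value",
            sink=logging_config.LogSink(name="name_value"),
            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
        )

    # The flattened fields are copied onto the request handed to the stub.
    _, args, _ = call.mock_calls[0]
    assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])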
@@ -3380,7 +3285,7 @@ def test_update_sink( transport: str = "grpc", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3403,33 +3308,22 @@ def test_update_sink( use_partitioned_tables=True ), ) - response = client.update_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3441,7 +3335,7 @@ def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3449,7 +3343,6 @@ def test_update_sink_empty_call(): client.update_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() @@ -3458,7 +3351,7 @@ async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3480,32 +3373,22 @@ async def test_update_sink_async( include_children=True, ) ) - response = await client.update_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3515,17 +3398,17 @@ async def test_update_sink_async_from_dict(): def test_update_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = logging_config.UpdateSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3540,11 +3423,14 @@ def test_update_sink_field_headers(): @pytest.mark.asyncio async def test_update_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3552,7 +3438,6 @@ async def test_update_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3566,35 +3451,31 @@ async def test_update_sink_field_headers_async(): def test_update_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_sink( sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3603,13 +3484,15 @@ def test_update_sink_flattened_error(): logging_config.UpdateSinkRequest(), sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -3624,24 +3507,23 @@ async def test_update_sink_flattened_async(): response = await client.update_sink( sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3650,7 +3532,7 @@ async def test_update_sink_flattened_error_async(): logging_config.UpdateSinkRequest(), sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -3658,7 +3540,7 @@ def test_delete_sink( transport: str = "grpc", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3669,13 +3551,11 @@ def test_delete_sink( with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() # Establish that the response is the type that we expect. @@ -3690,7 +3570,7 @@ def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3698,7 +3578,6 @@ def test_delete_sink_empty_call(): client.delete_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() @@ -3707,7 +3586,7 @@ async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3718,13 +3597,11 @@ async def test_delete_sink_async( with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() # Establish that the response is the type that we expect. @@ -3737,17 +3614,17 @@ async def test_delete_sink_async_from_dict(): def test_delete_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = None - client.delete_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3762,17 +3639,19 @@ def test_delete_sink_field_headers(): @pytest.mark.asyncio async def test_delete_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3786,13 +3665,12 @@ async def test_delete_sink_field_headers_async(): def test_delete_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_sink(sink_name="sink_name_value",) @@ -3801,12 +3679,11 @@ def test_delete_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" def test_delete_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3818,7 +3695,9 @@ def test_delete_sink_flattened_error(): @pytest.mark.asyncio async def test_delete_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -3834,13 +3713,14 @@ async def test_delete_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3854,7 +3734,7 @@ def test_list_exclusions( transport: str = "grpc", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3867,19 +3747,15 @@ def test_list_exclusions( call.return_value = logging_config.ListExclusionsResponse( next_page_token="next_page_token_value", ) - response = client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == "next_page_token_value" @@ -3891,7 +3767,7 @@ def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3899,7 +3775,6 @@ def test_list_exclusions_empty_call(): client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() @@ -3908,7 +3783,7 @@ async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3923,18 +3798,15 @@ async def test_list_exclusions_async( next_page_token="next_page_token_value", ) ) - response = await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -3944,17 +3816,17 @@ async def test_list_exclusions_async_from_dict(): def test_list_exclusions_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = logging_config.ListExclusionsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: call.return_value = logging_config.ListExclusionsResponse() - client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -3969,11 +3841,14 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio async def test_list_exclusions_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3981,7 +3856,6 @@ async def test_list_exclusions_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListExclusionsResponse() ) - await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -3995,13 +3869,12 @@ async def test_list_exclusions_field_headers_async(): def test_list_exclusions_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_exclusions(parent="parent_value",) @@ -4010,12 +3883,11 @@ def test_list_exclusions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_exclusions_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4027,7 +3899,9 @@ def test_list_exclusions_flattened_error(): @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4045,13 +3919,14 @@ async def test_list_exclusions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
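The ga_credentials, core_exceptions, field_mask_pb2, and bare google.auth names used on the "+" lines throughout these hunks come from the regenerated import block at the top of the test module, which sits outside this excerpt. A minimal sketch of what that block is assumed to look like:

    import google.auth                                          # patched directly in the transport ADC tests
    from google.auth import credentials as ga_credentials       # anonymous credentials for client construction
    from google.api_core import exceptions as core_exceptions   # DuplicateCredentialArgs in the transport tests
    from google.protobuf import field_mask_pb2                   # FieldMask for the update_mask flattened fields
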
@@ -4062,7 +3937,7 @@ async def test_list_exclusions_flattened_error_async(): def test_list_exclusions_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4105,7 +3980,7 @@ def test_list_exclusions_pager(): def test_list_exclusions_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4140,7 +4015,9 @@ def test_list_exclusions_pages(): @pytest.mark.asyncio async def test_list_exclusions_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4182,7 +4059,9 @@ async def test_list_exclusions_async_pager(): @pytest.mark.asyncio async def test_list_exclusions_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4223,7 +4102,7 @@ def test_get_exclusion( transport: str = "grpc", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4239,25 +4118,18 @@ def test_get_exclusion( filter="filter_value", disabled=True, ) - response = client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4269,7 +4141,7 @@ def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
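The test_list_exclusions_pager / _pages hunks above only touch how the client is constructed; the paging machinery they exercise is unchanged context that the diff elides. A rough sketch of that pattern, reusing the module's own client, mock, and logging_config names and assuming the usual generated pager body:

    with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
        # Two fake pages: the first carries a next_page_token, the second ends the stream.
        call.side_effect = (
            logging_config.ListExclusionsResponse(
                exclusions=[logging_config.LogExclusion(), logging_config.LogExclusion()],
                next_page_token="abc",
            ),
            logging_config.ListExclusionsResponse(exclusions=[], next_page_token=""),
        )
        pager = client.list_exclusions(request={})
        results = list(pager)  # the pager follows next_page_token across the mocked calls
        assert all(isinstance(i, logging_config.LogExclusion) for i in results)
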
@@ -4277,7 +4149,6 @@ def test_get_exclusion_empty_call(): client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() @@ -4286,7 +4157,7 @@ async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4304,24 +4175,18 @@ async def test_get_exclusion_async( disabled=True, ) ) - response = await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4331,17 +4196,17 @@ async def test_get_exclusion_async_from_dict(): def test_get_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4356,11 +4221,14 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio async def test_get_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4368,7 +4236,6 @@ async def test_get_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4382,13 +4249,12 @@ async def test_get_exclusion_field_headers_async(): def test_get_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_exclusion(name="name_value",) @@ -4397,12 +4263,11 @@ def test_get_exclusion_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4414,7 +4279,9 @@ def test_get_exclusion_flattened_error(): @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4432,13 +4299,14 @@ async def test_get_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4452,7 +4320,7 @@ def test_create_exclusion( transport: str = "grpc", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4468,25 +4336,18 @@ def test_create_exclusion( filter="filter_value", disabled=True, ) - response = client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4498,7 +4359,7 @@ def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
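Each *_field_headers test in these hunks now assigns its routing field inline (request.name = "name/value", request.parent = "parent/value", and so on). The assertion those tests end with is unchanged context that the diff does not show; it is assumed to check the x-goog-request-params metadata roughly like this:

    # Sketch of the trailing assertion the field-header tests share (kept as context in the diff).
    assert len(call.mock_calls) == 1
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value") in kw["metadata"]
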
@@ -4506,7 +4367,6 @@ def test_create_exclusion_empty_call(): client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() @@ -4515,7 +4375,7 @@ async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4533,24 +4393,18 @@ async def test_create_exclusion_async( disabled=True, ) ) - response = await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4560,17 +4414,17 @@ async def test_create_exclusion_async_from_dict(): def test_create_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4585,11 +4439,14 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio async def test_create_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4597,7 +4454,6 @@ async def test_create_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4611,13 +4467,12 @@ async def test_create_exclusion_field_headers_async(): def test_create_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_exclusion( @@ -4629,14 +4484,12 @@ def test_create_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") def test_create_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4650,7 +4503,9 @@ def test_create_exclusion_flattened_error(): @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -4671,15 +4526,15 @@ async def test_create_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4695,7 +4550,7 @@ def test_update_exclusion( transport: str = "grpc", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4711,25 +4566,18 @@ def test_update_exclusion( filter="filter_value", disabled=True, ) - response = client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4741,7 +4589,7 @@ def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4749,7 +4597,6 @@ def test_update_exclusion_empty_call(): client.update_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() @@ -4758,7 +4605,7 @@ async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4776,24 +4623,18 @@ async def test_update_exclusion_async( disabled=True, ) ) - response = await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4803,17 +4644,17 @@ async def test_update_exclusion_async_from_dict(): def test_update_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4828,11 +4669,14 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio async def test_update_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4840,7 +4684,6 @@ async def test_update_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4854,35 +4697,31 @@ async def test_update_exclusion_field_headers_async(): def test_update_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_exclusion( name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4891,13 +4730,15 @@ def test_update_exclusion_flattened_error(): logging_config.UpdateExclusionRequest(), name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -4912,24 +4753,23 @@ async def test_update_exclusion_flattened_async(): response = await client.update_exclusion( name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
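The flattened update_sink / update_exclusion tests now build the mask with the well-known protobuf type directly instead of the old field_mask alias. For reference, the user-facing call these tests model would look roughly like the following, with the project and exclusion names being purely illustrative:

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
    from google.cloud.logging_v2.types import logging_config
    from google.protobuf import field_mask_pb2

    client = ConfigServiceV2Client()  # outside of tests, credentials resolve via ADC
    updated = client.update_exclusion(
        name="projects/my-project/exclusions/my-exclusion",  # hypothetical resource name
        exclusion=logging_config.LogExclusion(name="my-exclusion", filter="severity<ERROR"),
        update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
    )
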
@@ -4938,7 +4778,7 @@ async def test_update_exclusion_flattened_error_async(): logging_config.UpdateExclusionRequest(), name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4946,7 +4786,7 @@ def test_delete_exclusion( transport: str = "grpc", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4957,13 +4797,11 @@ def test_delete_exclusion( with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() # Establish that the response is the type that we expect. @@ -4978,7 +4816,7 @@ def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4986,7 +4824,6 @@ def test_delete_exclusion_empty_call(): client.delete_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() @@ -4995,7 +4832,7 @@ async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5006,13 +4843,11 @@ async def test_delete_exclusion_async( with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() # Establish that the response is the type that we expect. @@ -5025,17 +4860,17 @@ async def test_delete_exclusion_async_from_dict(): def test_delete_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = None - client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. 
@@ -5050,17 +4885,19 @@ def test_delete_exclusion_field_headers(): @pytest.mark.asyncio async def test_delete_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -5074,13 +4911,12 @@ async def test_delete_exclusion_field_headers_async(): def test_delete_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_exclusion(name="name_value",) @@ -5089,12 +4925,11 @@ def test_delete_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5106,7 +4941,9 @@ def test_delete_exclusion_flattened_error(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5122,13 +4959,14 @@ async def test_delete_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5142,7 +4980,7 @@ def test_get_cmek_settings( transport: str = "grpc", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5159,23 +4997,17 @@ def test_get_cmek_settings( kms_key_name="kms_key_name_value", service_account_id="service_account_id_value", ) - response = client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5187,7 +5019,7 @@ def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5197,7 +5029,6 @@ def test_get_cmek_settings_empty_call(): client.get_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() @@ -5206,7 +5037,7 @@ async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5225,22 +5056,17 @@ async def test_get_cmek_settings_async( service_account_id="service_account_id_value", ) ) - response = await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5250,11 +5076,12 @@ async def test_get_cmek_settings_async_from_dict(): def test_get_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5262,7 +5089,6 @@ def test_get_cmek_settings_field_headers(): type(client.transport.get_cmek_settings), "__call__" ) as call: call.return_value = logging_config.CmekSettings() - client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. 
@@ -5277,11 +5103,14 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_get_cmek_settings_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5291,7 +5120,6 @@ async def test_get_cmek_settings_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.CmekSettings() ) - await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5308,7 +5136,7 @@ def test_update_cmek_settings( transport: str = "grpc", request_type=logging_config.UpdateCmekSettingsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5325,23 +5153,17 @@ def test_update_cmek_settings( kms_key_name="kms_key_name_value", service_account_id="service_account_id_value", ) - response = client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5353,7 +5175,7 @@ def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5363,7 +5185,6 @@ def test_update_cmek_settings_empty_call(): client.update_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() @@ -5373,7 +5194,7 @@ async def test_update_cmek_settings_async( request_type=logging_config.UpdateCmekSettingsRequest, ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5392,22 +5213,17 @@ async def test_update_cmek_settings_async( service_account_id="service_account_id_value", ) ) - response = await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5417,11 +5233,12 @@ async def test_update_cmek_settings_async_from_dict(): def test_update_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5429,7 +5246,6 @@ def test_update_cmek_settings_field_headers(): type(client.transport.update_cmek_settings), "__call__" ) as call: call.return_value = logging_config.CmekSettings() - client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5444,11 +5260,14 @@ def test_update_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_update_cmek_settings_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5458,7 +5277,6 @@ async def test_update_cmek_settings_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.CmekSettings() ) - await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5474,16 +5292,16 @@ async def test_update_cmek_settings_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -5493,7 +5311,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -5504,7 +5322,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. 
transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = ConfigServiceV2Client(transport=transport) assert client.transport is transport @@ -5513,13 +5331,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -5534,23 +5352,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.ConfigServiceV2GrpcTransport,) def test_config_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ConfigServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -5562,7 +5380,7 @@ def test_config_service_v2_base_transport(): ) as Transport: Transport.return_value = None transport = transports.ConfigServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -5597,15 +5415,42 @@ def test_config_service_v2_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id="octopus", + ) + + 
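The new requires_google_auth_gte_1_25_0 / requires_google_auth_lt_1_25_0 markers (and the requires_api_core_* ones used further down) are defined near the top of the regenerated test module, outside this excerpt. They are assumed to be plain pytest skip markers keyed on the installed package versions, roughly:

    import google.auth
    import packaging.version
    import pytest

    # Fall back to a low version string if google.auth does not expose __version__.
    _GOOGLE_AUTH_VERSION = getattr(google.auth, "__version__", "1.0.0")

    requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
        packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
        reason="This test requires google-auth >= 1.25.0",
    )
    requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
        packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
        reason="This test requires google-auth < 1.25.0",
    )
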
+@requires_google_auth_lt_1_25_0 +def test_config_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -5623,19 +5468,38 @@ def test_config_service_v2_base_transport_with_credentials_file(): def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConfigServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) ConfigServiceV2Client() adc.assert_called_once_with( scopes=( @@ -5648,14 +5512,46 @@ def test_config_service_v2_auth_adc(): ) -def test_config_service_v2_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.ConfigServiceV2GrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -5667,6 +5563,125 @@ def test_config_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_config_service_v2_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_config_service_v2_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -5675,7 +5690,7 @@ def test_config_service_v2_transport_auth_adc(): ], ) def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -5719,7 +5734,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_ def test_config_service_v2_host_no_port(): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), @@ -5729,7 +5744,7 @@ def test_config_service_v2_host_no_port(): def test_config_service_v2_host_with_port(): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), @@ -5785,9 +5800,9 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -5873,7 +5888,6 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): def test_cmek_settings_path(): project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project,) actual = ConfigServiceV2Client.cmek_settings_path(project) assert expected == actual @@ -5894,7 +5908,6 @@ def test_log_bucket_path(): project = "whelk" location = "octopus" bucket = "oyster" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( project=project, location=location, bucket=bucket, ) @@ -5918,7 +5931,6 @@ def test_parse_log_bucket_path(): def test_log_exclusion_path(): project = "winkle" exclusion = "nautilus" - expected = "projects/{project}/exclusions/{exclusion}".format( project=project, exclusion=exclusion, ) @@ -5941,7 +5953,6 @@ def test_parse_log_exclusion_path(): def test_log_sink_path(): project = "squid" sink = "clam" - expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) actual = ConfigServiceV2Client.log_sink_path(project, sink) assert expected == actual @@ -5964,7 +5975,6 @@ def test_log_view_path(): location = "nudibranch" bucket = "cuttlefish" view = "mussel" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( project=project, location=location, bucket=bucket, view=view, ) @@ -5988,7 +5998,6 @@ def test_parse_log_view_path(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6009,7 +6018,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -6028,7 +6036,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -6047,7 +6054,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = 
"projects/{project}".format(project=project,) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -6067,7 +6073,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -6094,7 +6099,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.ConfigServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6103,6 +6108,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = ConfigServiceV2Client.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 66f22621..5de01cf2 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,14 +23,14 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import ( LoggingServiceV2AsyncClient, @@ -39,15 +38,45 @@ from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.services.logging_service_v2 import transports +from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging -from google.logging.type import http_request_pb2 as http_request # type: ignore -from google.logging.type import log_severity_pb2 as log_severity # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore from google.oauth2 import service_account -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import duration_pb2 as duration # type: ignore 
-from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -99,7 +128,7 @@ def test__get_default_mtls_endpoint(): "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] ) def test_logging_service_v2_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -116,7 +145,7 @@ def test_logging_service_v2_client_from_service_account_info(client_class): "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] ) def test_logging_service_v2_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -169,7 +198,7 @@ def test_logging_service_v2_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -465,7 +494,7 @@ def test_logging_service_v2_client_client_options_from_dict(): def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogRequest): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -476,13 +505,11 @@ def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogReque with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_log(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() # Establish that the response is the type that we expect. @@ -497,7 +524,7 @@ def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -505,7 +532,6 @@ def test_delete_log_empty_call(): client.delete_log() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() @@ -514,7 +540,7 @@ async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -525,13 +551,11 @@ async def test_delete_log_async( with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() # Establish that the response is the type that we expect. @@ -544,17 +568,17 @@ async def test_delete_log_async_from_dict(): def test_delete_log_field_headers(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() + request.log_name = "log_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = None - client.delete_log(request) # Establish that the underlying gRPC stub method was called. @@ -570,18 +594,18 @@ def test_delete_log_field_headers(): @pytest.mark.asyncio async def test_delete_log_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() + request.log_name = "log_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log(request) # Establish that the underlying gRPC stub method was called. @@ -595,13 +619,12 @@ async def test_delete_log_field_headers_async(): def test_delete_log_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_log(log_name="log_name_value",) @@ -610,12 +633,11 @@ def test_delete_log_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" def test_delete_log_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -628,7 +650,7 @@ def test_delete_log_flattened_error(): @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -645,14 +667,13 @@ async def test_delete_log_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -667,7 +688,7 @@ def test_write_log_entries( transport: str = "grpc", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -680,17 +701,14 @@ def test_write_log_entries( ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() - response = client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging.WriteLogEntriesResponse) @@ -702,7 +720,7 @@ def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -712,7 +730,6 @@ def test_write_log_entries_empty_call(): client.write_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() @@ -721,7 +738,7 @@ async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -736,13 +753,11 @@ async def test_write_log_entries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.WriteLogEntriesResponse() ) - response = await client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() # Establish that the response is the type that we expect. @@ -755,7 +770,7 @@ async def test_write_log_entries_async_from_dict(): def test_write_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -763,12 +778,11 @@ def test_write_log_entries_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.write_log_entries( log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -777,20 +791,16 @@ def test_write_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - - assert args[0].resource == monitored_resource.MonitoredResource( + assert args[0].resource == monitored_resource_pb2.MonitoredResource( type="type__value" ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] def test_write_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -798,7 +808,7 @@ def test_write_log_entries_flattened_error(): client.write_log_entries( logging.WriteLogEntriesRequest(), log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -807,7 +817,7 @@ def test_write_log_entries_flattened_error(): @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -824,7 +834,7 @@ async def test_write_log_entries_flattened_async(): # using the keyword arguments to the method. response = await client.write_log_entries( log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -833,22 +843,18 @@ async def test_write_log_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - - assert args[0].resource == monitored_resource.MonitoredResource( + assert args[0].resource == monitored_resource_pb2.MonitoredResource( type="type__value" ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -857,7 +863,7 @@ async def test_write_log_entries_flattened_error_async(): await client.write_log_entries( logging.WriteLogEntriesRequest(), log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -867,7 +873,7 @@ def test_list_log_entries( transport: str = "grpc", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -880,19 +886,15 @@ def test_list_log_entries( call.return_value = logging.ListLogEntriesResponse( next_page_token="next_page_token_value", ) - response = client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == "next_page_token_value" @@ -904,7 +906,7 @@ def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -912,7 +914,6 @@ def test_list_log_entries_empty_call(): client.list_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() @@ -921,7 +922,7 @@ async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -934,18 +935,15 @@ async def test_list_log_entries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.ListLogEntriesResponse(next_page_token="next_page_token_value",) ) - response = await client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -955,13 +953,12 @@ async def test_list_log_entries_async_from_dict(): def test_list_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_entries( @@ -974,16 +971,13 @@ def test_list_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" def test_list_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -999,7 +993,7 @@ def test_list_log_entries_flattened_error(): @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1022,18 +1016,15 @@ async def test_list_log_entries_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1048,7 +1039,7 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1083,7 +1074,7 @@ def test_list_log_entries_pager(): def test_list_log_entries_pages(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1113,7 +1104,9 @@ def test_list_log_entries_pages(): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1150,7 +1143,9 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1187,7 +1182,7 @@ def test_list_monitored_resource_descriptors( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1202,19 +1197,15 @@ def test_list_monitored_resource_descriptors( call.return_value = logging.ListMonitoredResourceDescriptorsResponse( next_page_token="next_page_token_value", ) - response = client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == "next_page_token_value" @@ -1226,7 +1217,7 @@ def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1236,7 +1227,6 @@ def test_list_monitored_resource_descriptors_empty_call(): client.list_monitored_resource_descriptors() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() @@ -1246,7 +1236,7 @@ async def test_list_monitored_resource_descriptors_async( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1263,18 +1253,15 @@ async def test_list_monitored_resource_descriptors_async( next_page_token="next_page_token_value", ) ) - response = await client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1284,7 +1271,7 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1294,9 +1281,9 @@ def test_list_monitored_resource_descriptors_pager(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1305,14 +1292,14 @@ def test_list_monitored_resource_descriptors_pager(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1326,13 +1313,13 @@ def test_list_monitored_resource_descriptors_pager(): results = [i for i in pager] assert len(results) == 6 assert all( - isinstance(i, monitored_resource.MonitoredResourceDescriptor) + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) for i in results ) def test_list_monitored_resource_descriptors_pages(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1342,9 +1329,9 @@ def test_list_monitored_resource_descriptors_pages(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1353,14 +1340,14 @@ def test_list_monitored_resource_descriptors_pages(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1372,7 +1359,9 @@ def test_list_monitored_resource_descriptors_pages(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1384,9 +1373,9 @@ async def test_list_monitored_resource_descriptors_async_pager(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1395,14 +1384,14 @@ async def test_list_monitored_resource_descriptors_async_pager(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1415,14 +1404,16 @@ async def test_list_monitored_resource_descriptors_async_pager(): assert len(responses) == 6 assert all( - isinstance(i, monitored_resource.MonitoredResourceDescriptor) + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) for i in responses ) @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1434,9 +1425,9 @@ async def test_list_monitored_resource_descriptors_async_pages(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1445,14 +1436,14 @@ async def test_list_monitored_resource_descriptors_async_pages(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1468,7 +1459,7 @@ async def test_list_monitored_resource_descriptors_async_pages(): def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1481,21 +1472,16 @@ def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest call.return_value = logging.ListLogsResponse( log_names=["log_names_value"], next_page_token="next_page_token_value", ) - response = client.list_logs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ["log_names_value"] - assert response.next_page_token == "next_page_token_value" @@ -1507,7 +1493,7 @@ def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1515,7 +1501,6 @@ def test_list_logs_empty_call(): client.list_logs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() @@ -1524,7 +1509,7 @@ async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1539,20 +1524,16 @@ async def test_list_logs_async( log_names=["log_names_value"], next_page_token="next_page_token_value", ) ) - response = await client.list_logs(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsAsyncPager) - assert response.log_names == ["log_names_value"] - assert response.next_page_token == "next_page_token_value" @@ -1562,17 +1543,17 @@ async def test_list_logs_async_from_dict(): def test_list_logs_field_headers(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: call.return_value = logging.ListLogsResponse() - client.list_logs(request) # Establish that the underlying gRPC stub method was called. @@ -1588,12 +1569,13 @@ def test_list_logs_field_headers(): @pytest.mark.asyncio async def test_list_logs_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1601,7 +1583,6 @@ async def test_list_logs_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.ListLogsResponse() ) - await client.list_logs(request) # Establish that the underlying gRPC stub method was called. @@ -1615,13 +1596,12 @@ async def test_list_logs_field_headers_async(): def test_list_logs_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_logs(parent="parent_value",) @@ -1630,12 +1610,11 @@ def test_list_logs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_logs_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1648,7 +1627,7 @@ def test_list_logs_flattened_error(): @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1667,14 +1646,13 @@ async def test_list_logs_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1686,7 +1664,7 @@ async def test_list_logs_flattened_error_async(): def test_list_logs_pager(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1715,7 +1693,7 @@ def test_list_logs_pager(): def test_list_logs_pages(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1736,7 +1714,9 @@ def test_list_logs_pages(): @pytest.mark.asyncio async def test_list_logs_async_pager(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1764,7 +1744,9 @@ async def test_list_logs_async_pager(): @pytest.mark.asyncio async def test_list_logs_async_pages(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1791,26 +1773,23 @@ def test_tail_log_entries( transport: str = "grpc", request_type=logging.TailLogEntriesRequest ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = iter([logging.TailLogEntriesResponse()]) - response = client.tail_log_entries(iter(requests)) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -1827,13 +1806,12 @@ async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. @@ -1843,13 +1821,11 @@ async def test_tail_log_entries_async( call.return_value.read = mock.AsyncMock( side_effect=[logging.TailLogEntriesResponse()] ) - response = await client.tail_log_entries(iter(requests)) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -1865,16 +1841,16 @@ async def test_tail_log_entries_async_from_dict(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -1884,7 +1860,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -1895,7 +1871,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = LoggingServiceV2Client(transport=transport) assert client.transport is transport @@ -1904,13 +1880,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1925,23 +1901,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.LoggingServiceV2GrpcTransport,) def test_logging_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.LoggingServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1953,7 +1929,7 @@ def test_logging_service_v2_base_transport(): ) as Transport: Transport.return_value = None transport = transports.LoggingServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1971,15 +1947,43 @@ def test_logging_service_v2_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1998,19 +2002,39 @@ def test_logging_service_v2_base_transport_with_credentials_file(): def test_logging_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_logging_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LoggingServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) LoggingServiceV2Client() adc.assert_called_once_with( scopes=( @@ -2024,14 +2048,47 @@ def test_logging_service_v2_auth_adc(): ) -def test_logging_service_v2_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_logging_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.LoggingServiceV2GrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -2044,6 +2101,127 @@ def test_logging_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_logging_service_v2_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_logging_service_v2_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2052,7 +2230,7 @@ def test_logging_service_v2_transport_auth_adc(): ], ) def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2097,7 +2275,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport def test_logging_service_v2_host_no_port(): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), @@ -2107,7 +2285,7 @@ def test_logging_service_v2_host_no_port(): def test_logging_service_v2_host_with_port(): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), @@ -2163,9 +2341,9 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2254,7 +2432,6 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): def test_log_path(): project = "squid" log = "clam" - expected = "projects/{project}/logs/{log}".format(project=project, log=log,) actual = LoggingServiceV2Client.log_path(project, log) assert expected == actual @@ -2274,7 +2451,6 @@ def test_parse_log_path(): def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2295,7 +2471,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) actual = LoggingServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2314,7 +2489,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) actual = LoggingServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2333,7 +2507,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) actual = 
LoggingServiceV2Client.common_project_path(project) assert expected == actual @@ -2353,7 +2526,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2380,7 +2552,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.LoggingServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2389,6 +2561,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = LoggingServiceV2Client.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 6faec201..a8a420a2 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,18 +23,17 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import label_pb2 as label # type: ignore -from google.api import launch_stage_pb2 as launch_stage # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.api import metric_pb2 as metric # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.metrics_service_v2 import ( MetricsServiceV2AsyncClient, @@ -43,10 +41,40 @@ from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports +from google.cloud.logging_v2.services.metrics_service_v2.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.logging_v2.services.metrics_service_v2.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.logging_v2.types import logging_metrics from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: 
ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -98,7 +126,7 @@ def test__get_default_mtls_endpoint(): "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] ) def test_metrics_service_v2_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -115,7 +143,7 @@ def test_metrics_service_v2_client_from_service_account_info(client_class): "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] ) def test_metrics_service_v2_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -168,7 +196,7 @@ def test_metrics_service_v2_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(MetricsServiceV2Client, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -466,7 +494,7 @@ def test_list_log_metrics( transport: str = "grpc", request_type=logging_metrics.ListLogMetricsRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -479,19 +507,15 @@ def test_list_log_metrics( call.return_value = logging_metrics.ListLogMetricsResponse( next_page_token="next_page_token_value", ) - response = client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == "next_page_token_value" @@ -503,7 +527,7 @@ def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -511,7 +535,6 @@ def test_list_log_metrics_empty_call(): client.list_log_metrics() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() @@ -520,7 +543,7 @@ async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -535,18 +558,15 @@ async def test_list_log_metrics_async( next_page_token="next_page_token_value", ) ) - response = await client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -556,17 +576,17 @@ async def test_list_log_metrics_async_from_dict(): def test_list_log_metrics_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: call.return_value = logging_metrics.ListLogMetricsResponse() - client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. @@ -582,12 +602,13 @@ def test_list_log_metrics_field_headers(): @pytest.mark.asyncio async def test_list_log_metrics_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -595,7 +616,6 @@ async def test_list_log_metrics_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_metrics.ListLogMetricsResponse() ) - await client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. @@ -609,13 +629,12 @@ async def test_list_log_metrics_field_headers_async(): def test_list_log_metrics_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.ListLogMetricsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_metrics(parent="parent_value",) @@ -624,12 +643,11 @@ def test_list_log_metrics_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_log_metrics_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -642,7 +660,7 @@ def test_list_log_metrics_flattened_error(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -661,14 +679,13 @@ async def test_list_log_metrics_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -680,7 +697,7 @@ async def test_list_log_metrics_flattened_error_async(): def test_list_log_metrics_pager(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -718,7 +735,7 @@ def test_list_log_metrics_pager(): def test_list_log_metrics_pages(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -748,7 +765,9 @@ def test_list_log_metrics_pages(): @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): - client = MetricsServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -785,7 +804,9 @@ async def test_list_log_metrics_async_pager(): @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): - client = MetricsServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -821,7 +842,7 @@ def test_get_log_metric( transport: str = "grpc", request_type=logging_metrics.GetLogMetricRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -838,27 +859,19 @@ def test_get_log_metric( value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) - response = client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -870,7 +883,7 @@ def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -878,7 +891,6 @@ def test_get_log_metric_empty_call(): client.get_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() @@ -887,7 +899,7 @@ async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -906,26 +918,19 @@ async def test_get_log_metric_async( version=logging_metrics.LogMetric.ApiVersion.V1, ) ) - response = await client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -935,17 +940,17 @@ async def test_get_log_metric_async_from_dict(): def test_get_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: call.return_value = logging_metrics.LogMetric() - client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -961,12 +966,13 @@ def test_get_log_metric_field_headers(): @pytest.mark.asyncio async def test_get_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -974,7 +980,6 @@ async def test_get_log_metric_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_metrics.LogMetric() ) - await client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -988,13 +993,12 @@ async def test_get_log_metric_field_headers_async(): def test_get_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_log_metric(metric_name="metric_name_value",) @@ -1003,12 +1007,11 @@ def test_get_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" def test_get_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1021,7 +1024,7 @@ def test_get_log_metric_flattened_error(): @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1040,14 +1043,13 @@ async def test_get_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1062,7 +1064,7 @@ def test_create_log_metric( transport: str = "grpc", request_type=logging_metrics.CreateLogMetricRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1081,27 +1083,19 @@ def test_create_log_metric( value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) - response = client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1113,7 +1107,7 @@ def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1123,7 +1117,6 @@ def test_create_log_metric_empty_call(): client.create_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() @@ -1132,7 +1125,7 @@ async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1153,26 +1146,19 @@ async def test_create_log_metric_async( version=logging_metrics.LogMetric.ApiVersion.V1, ) ) - response = await client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1182,11 +1168,12 @@ async def test_create_log_metric_async_from_dict(): def test_create_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1194,7 +1181,6 @@ def test_create_log_metric_field_headers(): type(client.transport.create_log_metric), "__call__" ) as call: call.return_value = logging_metrics.LogMetric() - client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1210,12 +1196,13 @@ def test_create_log_metric_field_headers(): @pytest.mark.asyncio async def test_create_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1225,7 +1212,6 @@ async def test_create_log_metric_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_metrics.LogMetric() ) - await client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1239,7 +1225,7 @@ async def test_create_log_metric_field_headers_async(): def test_create_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1247,7 +1233,6 @@ def test_create_log_metric_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_log_metric( @@ -1258,14 +1243,12 @@ def test_create_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") def test_create_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1280,7 +1263,7 @@ def test_create_log_metric_flattened_error(): @pytest.mark.asyncio async def test_create_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1303,16 +1286,14 @@ async def test_create_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1329,7 +1310,7 @@ def test_update_log_metric( transport: str = "grpc", request_type=logging_metrics.UpdateLogMetricRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1348,27 +1329,19 @@ def test_update_log_metric( value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) - response = client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1380,7 +1353,7 @@ def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1390,7 +1363,6 @@ def test_update_log_metric_empty_call(): client.update_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() @@ -1399,7 +1371,7 @@ async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1420,26 +1392,19 @@ async def test_update_log_metric_async( version=logging_metrics.LogMetric.ApiVersion.V1, ) ) - response = await client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1449,11 +1414,12 @@ async def test_update_log_metric_async_from_dict(): def test_update_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1461,7 +1427,6 @@ def test_update_log_metric_field_headers(): type(client.transport.update_log_metric), "__call__" ) as call: call.return_value = logging_metrics.LogMetric() - client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1477,12 +1442,13 @@ def test_update_log_metric_field_headers(): @pytest.mark.asyncio async def test_update_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1492,7 +1458,6 @@ async def test_update_log_metric_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_metrics.LogMetric() ) - await client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1506,7 +1471,7 @@ async def test_update_log_metric_field_headers_async(): def test_update_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1514,7 +1479,6 @@ def test_update_log_metric_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_log_metric( @@ -1526,14 +1490,12 @@ def test_update_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") def test_update_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1548,7 +1510,7 @@ def test_update_log_metric_flattened_error(): @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1572,16 +1534,14 @@ async def test_update_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1598,7 +1558,7 @@ def test_delete_log_metric( transport: str = "grpc", request_type=logging_metrics.DeleteLogMetricRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1611,13 +1571,11 @@ def test_delete_log_metric( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() # Establish that the response is the type that we expect. @@ -1632,7 +1590,7 @@ def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1642,7 +1600,6 @@ def test_delete_log_metric_empty_call(): client.delete_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() @@ -1651,7 +1608,7 @@ async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1664,13 +1621,11 @@ async def test_delete_log_metric_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() # Establish that the response is the type that we expect. 
@@ -1683,11 +1638,12 @@ async def test_delete_log_metric_async_from_dict(): def test_delete_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1695,7 +1651,6 @@ def test_delete_log_metric_field_headers(): type(client.transport.delete_log_metric), "__call__" ) as call: call.return_value = None - client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1711,12 +1666,13 @@ def test_delete_log_metric_field_headers(): @pytest.mark.asyncio async def test_delete_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1724,7 +1680,6 @@ async def test_delete_log_metric_field_headers_async(): type(client.transport.delete_log_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1738,7 +1693,7 @@ async def test_delete_log_metric_field_headers_async(): def test_delete_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1746,7 +1701,6 @@ def test_delete_log_metric_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_log_metric(metric_name="metric_name_value",) @@ -1755,12 +1709,11 @@ def test_delete_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" def test_delete_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1773,7 +1726,7 @@ def test_delete_log_metric_flattened_error(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1792,14 +1745,13 @@ async def test_delete_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1813,16 +1765,16 @@ async def test_delete_log_metric_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -1832,7 +1784,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -1843,7 +1795,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = MetricsServiceV2Client(transport=transport) assert client.transport is transport @@ -1852,13 +1804,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1873,23 +1825,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.MetricsServiceV2GrpcTransport,) def test_metrics_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MetricsServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1901,7 +1853,7 @@ def test_metrics_service_v2_base_transport(): ) as Transport: Transport.return_value = None transport = transports.MetricsServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1918,15 +1870,43 @@ def test_metrics_service_v2_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1945,19 +1925,39 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricsServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) MetricsServiceV2Client() adc.assert_called_once_with( scopes=( @@ -1971,14 +1971,47 @@ def test_metrics_service_v2_auth_adc(): ) -def test_metrics_service_v2_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.MetricsServiceV2GrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -1991,6 +2024,127 @@ def test_metrics_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_metrics_service_v2_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_metrics_service_v2_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -1999,7 +2153,7 @@ def test_metrics_service_v2_transport_auth_adc(): ], ) def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2044,7 +2198,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport def test_metrics_service_v2_host_no_port(): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), @@ -2054,7 +2208,7 @@ def test_metrics_service_v2_host_no_port(): def test_metrics_service_v2_host_with_port(): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), @@ -2110,9 +2264,9 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2201,7 +2355,6 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): def test_log_metric_path(): project = "squid" metric = "clam" - expected = "projects/{project}/metrics/{metric}".format( project=project, metric=metric, ) @@ -2223,7 +2376,6 @@ def test_parse_log_metric_path(): def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2244,7 +2396,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) actual = MetricsServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2263,7 +2414,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) actual = MetricsServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2282,7 +2432,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) actual = MetricsServiceV2Client.common_project_path(project) assert expected == 
actual @@ -2302,7 +2451,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2329,7 +2477,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.MetricsServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2338,6 +2486,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = MetricsServiceV2Client.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info)
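
The `@requires_google_auth_gte_1_25_0`/`@requires_google_auth_lt_1_25_0` and `@requires_api_core_gte_1_26_0`/`@requires_api_core_lt_1_26_0` markers applied above are defined earlier in this test module; the sketch below shows one way such version-gated skip markers can be built with `packaging.version` (constant and helper names here are illustrative, not taken from the patch). google-auth 1.25.0 added the `default_scopes` argument to `google.auth.default`, and google-api-core 1.26.0 added `default_scopes`/`default_host` to `grpc_helpers.create_channel`, which is why each assertion has a pre-upgrade and a post-upgrade variant.

import packaging.version
import pytest

import google.auth
from google.api_core import version as api_core_version

# Installed versions; fall back defensively if __version__ is missing.
_GOOGLE_AUTH_VERSION = getattr(google.auth, "__version__", "0.0.0")
_API_CORE_VERSION = api_core_version.__version__


def _at_least(installed, minimum):
    # True when the installed version meets the required minimum.
    return packaging.version.parse(installed) >= packaging.version.parse(minimum)


requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    not _at_least(_GOOGLE_AUTH_VERSION, "1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
    _at_least(_GOOGLE_AUTH_VERSION, "1.25.0"),
    reason="This test requires google-auth < 1.25.0",
)
requires_api_core_gte_1_26_0 = pytest.mark.skipif(
    not _at_least(_API_CORE_VERSION, "1.26.0"),
    reason="This test requires google-api-core >= 1.26.0",
)
requires_api_core_lt_1_26_0 = pytest.mark.skipif(
    _at_least(_API_CORE_VERSION, "1.26.0"),
    reason="This test requires google-api-core < 1.26.0",
)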
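
The path-helper tests above reduce to a build/parse round trip on `MetricsServiceV2Client`; a standalone usage sketch with arbitrary resource values:

from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client

# Build a log metric resource name and parse it back into its components.
path = MetricsServiceV2Client.log_metric_path("my-project", "my-metric")
assert path == "projects/my-project/metrics/my-metric"
assert MetricsServiceV2Client.parse_log_metric_path(path) == {
    "project": "my-project",
    "metric": "my-metric",
}

# The common_* helpers cover the shared resource prefixes exercised above.
assert MetricsServiceV2Client.common_project_path("my-project") == "projects/my-project"
assert (
    MetricsServiceV2Client.common_location_path("my-project", "us-central1")
    == "projects/my-project/locations/us-central1"
)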