From 5390146e7bf83038a55755f53b119504ce000d62 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Mon, 1 Nov 2021 15:02:12 +0000
Subject: [PATCH] chore: use gapic-generator-python 0.53.4 (#341)

- [ ] Regenerate this pull request now.

docs: list oneofs in docstring
fix(deps): require google-api-core >= 1.28.0
fix(deps): drop packaging dependency

committer: busunkim96@
PiperOrigin-RevId: 406468269

Source-Link: https://github.com/googleapis/googleapis/commit/83d81b0c8fc22291a13398d6d77f02dc97a5b6f4

Source-Link: https://github.com/googleapis/googleapis-gen/commit/2ff001fbacb9e77e71d734de5f955c05fdae8526
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmZmMDAxZmJhY2I5ZTc3ZTcxZDczNGRlNWY5NTVjMDVmZGFlODUyNiJ9
---
 .../services/big_query_read/async_client.py   |  22 ++--
 .../services/big_query_read/client.py         |   8 +-
 .../big_query_read/transports/base.py         |  35 +----
 .../big_query_read/transports/grpc_asyncio.py |   1 -
 .../services/big_query_write/async_client.py  |  36 +++---
 .../services/big_query_write/client.py        |  14 ++-
 .../big_query_write/transports/base.py        |  35 +----
 .../transports/grpc_asyncio.py                |   1 -
 .../bigquery_storage_v1/types/storage.py      |  24 ++++
 .../cloud/bigquery_storage_v1/types/stream.py |  12 ++
 .../services/big_query_read/async_client.py   |  22 ++--
 .../services/big_query_read/client.py         |   8 +-
 .../big_query_read/transports/base.py         |  35 +----
 .../big_query_read/transports/grpc_asyncio.py |   1 -
 .../services/big_query_write/async_client.py  |  36 +++---
 .../services/big_query_write/client.py        |  14 ++-
 .../big_query_write/transports/base.py        |  35 +----
 .../transports/grpc_asyncio.py                |   1 -
 .../bigquery_storage_v1beta2/types/storage.py |  23 ++++
 .../bigquery_storage_v1beta2/types/stream.py  |   9 ++
 setup.py                                      |   3 +-
 testing/constraints-3.6.txt                   |   4 +-
 .../test_big_query_read.py                    | 103 ++----------------
 .../test_big_query_write.py                   | 103 ++----------------
 .../test_big_query_read.py                    | 103 ++----------------
 .../test_big_query_write.py                   | 103 ++----------------
 26 files changed, 198 insertions(+), 593 deletions(-)

diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py
index 2215c6cd..c138d933 100644
--- a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py
+++ b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py
@@ -19,13 +19,15 @@
 from typing import Dict, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union
 import pkg_resources
 
-import google.api_core.client_options as ClientOptions  # type: ignore
+from google.api_core.client_options import ClientOptions  # type: ignore
 from google.api_core import exceptions as core_exceptions  # type: ignore
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import retry as retries  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
+OptionalRetry = Union[retries.Retry, object]
+
 from google.cloud.bigquery_storage_v1.types import arrow
 from google.cloud.bigquery_storage_v1.types import avro
 from google.cloud.bigquery_storage_v1.types import storage
@@ -168,12 +170,12 @@ def __init__(
 
     async def create_read_session(
         self,
-        request: storage.CreateReadSessionRequest = None,
+        request: Union[storage.CreateReadSessionRequest, dict] = None,
        *,
        parent: str = None,
        read_session: stream.ReadSession = None,
        max_stream_count: int = None,
-        retry: retries.Retry = 
gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.ReadSession: @@ -202,7 +204,7 @@ async def create_read_session( caller. Args: - request (:class:`google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest`): + request (Union[google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest, dict]): The request object. Request message for `CreateReadSession`. parent (:class:`str`): @@ -301,11 +303,11 @@ async def create_read_session( def read_rows( self, - request: storage.ReadRowsRequest = None, + request: Union[storage.ReadRowsRequest, dict] = None, *, read_stream: str = None, offset: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[storage.ReadRowsResponse]]: @@ -319,7 +321,7 @@ def read_rows( reflecting the current state of the stream. Args: - request (:class:`google.cloud.bigquery_storage_v1.types.ReadRowsRequest`): + request (Union[google.cloud.bigquery_storage_v1.types.ReadRowsRequest, dict]): The request object. Request message for `ReadRows`. read_stream (:class:`str`): Required. Stream to read rows from. @@ -400,9 +402,9 @@ def read_rows( async def split_read_stream( self, - request: storage.SplitReadStreamRequest = None, + request: Union[storage.SplitReadStreamRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.SplitReadStreamResponse: @@ -421,7 +423,7 @@ async def split_read_stream( once the streams have been read to completion. Args: - request (:class:`google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest`): + request (Union[google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest, dict]): The request object. Request message for `SplitReadStream`. 
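
# ===========================================================================
# Editorial sketch (not part of the generated diff): why the OptionalRetry
# alias introduced by this patch is Union[retries.Retry, object].
# gapic_v1.method.DEFAULT is a plain sentinel object, not a Retry instance,
# so annotating the parameter as retries.Retry alone would reject the default
# argument. The function below is hypothetical; the imports are real.
from typing import Union

from google.api_core import gapic_v1
from google.api_core import retry as retries

OptionalRetry = Union[retries.Retry, object]


def describe_retry(retry: OptionalRetry = gapic_v1.method.DEFAULT) -> str:
    # The generated clients forward this value to the wrapped gRPC method:
    # DEFAULT means "use the policy from _prep_wrapped_messages", None
    # disables retries, and a Retry instance overrides the default.
    if retry is gapic_v1.method.DEFAULT:
        return "method default retry policy"
    if retry is None:
        return "retries disabled"
    return "caller-supplied retry"
# ===========================================================================
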
retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py index 40ec76d9..a00737ff 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.bigquery_storage_v1.types import arrow from google.cloud.bigquery_storage_v1.types import avro from google.cloud.bigquery_storage_v1.types import storage @@ -391,7 +393,7 @@ def create_read_session( parent: str = None, read_session: stream.ReadSession = None, max_stream_count: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.ReadSession: @@ -513,7 +515,7 @@ def read_rows( *, read_stream: str = None, offset: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[storage.ReadRowsResponse]: @@ -601,7 +603,7 @@ def split_read_stream( self, request: Union[storage.SplitReadStreamRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.SplitReadStreamResponse: diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py index db38c893..12941108 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -38,15 +37,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class BigQueryReadTransport(abc.ABC): """Abstract transport class for BigQueryRead.""" @@ -100,7 +90,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -133,29 +123,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
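
# ===========================================================================
# Editorial sketch (not part of the patch): the deletion around this point
# removes a version probe that chose between google-auth keyword sets. With
# google-api-core >= 1.28.0 guaranteeing google-auth >= 1.25.0, the transport
# can always pass both keywords, which is what the new inline dict in
# __init__ does. AUTH_SCOPES is abbreviated here for illustration.
from typing import Dict, Optional, Sequence

AUTH_SCOPES = ("https://www.googleapis.com/auth/bigquery",)


def scopes_kwargs(scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
    # google.auth.default(**scopes_kwargs(...)) uses `scopes` when the caller
    # supplied any and otherwise falls back to the service defaults.
    return {"scopes": scopes, "default_scopes": AUTH_SCOPES}


assert scopes_kwargs(None) == {"scopes": None, "default_scopes": AUTH_SCOPES}
# ===========================================================================
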
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py index 7c075872..e6b3e7f9 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py index e167c0c6..3c49ea2e 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py @@ -28,13 +28,15 @@ ) import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.bigquery_storage_v1.types import storage from google.cloud.bigquery_storage_v1.types import stream from google.cloud.bigquery_storage_v1.types import table @@ -181,11 +183,11 @@ def __init__( async def create_write_stream( self, - request: storage.CreateWriteStreamRequest = None, + request: Union[storage.CreateWriteStreamRequest, dict] = None, *, parent: str = None, write_stream: stream.WriteStream = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.WriteStream: @@ -198,7 +200,7 @@ async def create_write_stream( received. Args: - request (:class:`google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest`): + request (Union[google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest, dict]): The request object. Request message for `CreateWriteStream`. 
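
# ===========================================================================
# Editorial sketch (not part of the patch): the import change at the top of
# each async client swaps a module alias for the real class. The old form,
# `import google.api_core.client_options as ClientOptions`, bound the name to
# the *module*, which only worked for loose type annotations; the new form
# imports the actual class, so it can be instantiated and isinstance-checked.
from google.api_core.client_options import ClientOptions

options = ClientOptions(api_endpoint="bigquerystorage.googleapis.com")
assert isinstance(options, ClientOptions)
# ===========================================================================
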
parent (:class:`str`): @@ -280,7 +282,7 @@ def append_rows( self, requests: AsyncIterator[storage.AppendRowsRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[storage.AppendRowsResponse]]: @@ -366,17 +368,17 @@ def append_rows( async def get_write_stream( self, - request: storage.GetWriteStreamRequest = None, + request: Union[storage.GetWriteStreamRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.WriteStream: r"""Gets information about a write stream. Args: - request (:class:`google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest`): + request (Union[google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest, dict]): The request object. Request message for `GetWriteStreamRequest`. name (:class:`str`): @@ -448,10 +450,10 @@ async def get_write_stream( async def finalize_write_stream( self, - request: storage.FinalizeWriteStreamRequest = None, + request: Union[storage.FinalizeWriteStreamRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.FinalizeWriteStreamResponse: @@ -459,7 +461,7 @@ async def finalize_write_stream( the stream. Finalize is not supported on the '_default' stream. Args: - request (:class:`google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest`): + request (Union[google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest, dict]): The request object. Request message for invoking `FinalizeWriteStream`. name (:class:`str`): @@ -528,10 +530,10 @@ async def finalize_write_stream( async def batch_commit_write_streams( self, - request: storage.BatchCommitWriteStreamsRequest = None, + request: Union[storage.BatchCommitWriteStreamsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.BatchCommitWriteStreamsResponse: @@ -543,7 +545,7 @@ async def batch_commit_write_streams( becomes available for read operations. Args: - request (:class:`google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest`): + request (Union[google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest, dict]): The request object. Request message for `BatchCommitWriteStreams`. parent (:class:`str`): @@ -613,10 +615,10 @@ async def batch_commit_write_streams( async def flush_rows( self, - request: storage.FlushRowsRequest = None, + request: Union[storage.FlushRowsRequest, dict] = None, *, write_stream: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.FlushRowsResponse: @@ -632,7 +634,7 @@ async def flush_rows( BUFFERED. Args: - request (:class:`google.cloud.bigquery_storage_v1.types.FlushRowsRequest`): + request (Union[google.cloud.bigquery_storage_v1.types.FlushRowsRequest, dict]): The request object. Request message for `FlushRows`. write_stream (:class:`str`): Required. 
The stream that is the diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/client.py b/google/cloud/bigquery_storage_v1/services/big_query_write/client.py index fbbed83e..9018ead5 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.bigquery_storage_v1.types import storage from google.cloud.bigquery_storage_v1.types import stream from google.cloud.bigquery_storage_v1.types import table @@ -374,7 +376,7 @@ def create_write_stream( *, parent: str = None, write_stream: stream.WriteStream = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.WriteStream: @@ -459,7 +461,7 @@ def append_rows( self, requests: Iterator[storage.AppendRowsRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[storage.AppendRowsResponse]: @@ -535,7 +537,7 @@ def get_write_stream( request: Union[storage.GetWriteStreamRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.WriteStream: @@ -607,7 +609,7 @@ def finalize_write_stream( request: Union[storage.FinalizeWriteStreamRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.FinalizeWriteStreamResponse: @@ -677,7 +679,7 @@ def batch_commit_write_streams( request: Union[storage.BatchCommitWriteStreamsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.BatchCommitWriteStreamsResponse: @@ -754,7 +756,7 @@ def flush_rows( request: Union[storage.FlushRowsRequest, dict] = None, *, write_stream: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.FlushRowsResponse: diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py index 21e4b644..d51c0dff 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -38,15 +37,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = 
pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class BigQueryWriteTransport(abc.ABC): """Abstract transport class for BigQueryWrite.""" @@ -100,7 +90,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -133,29 +123,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py index d3b096a8..bdb512ea 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/google/cloud/bigquery_storage_v1/types/storage.py b/google/cloud/bigquery_storage_v1/types/storage.py index 822a1ca2..30c2ef31 100644 --- a/google/cloud/bigquery_storage_v1/types/storage.py +++ b/google/cloud/bigquery_storage_v1/types/storage.py @@ -149,12 +149,21 @@ class ReadRowsResponse(proto.Message): r"""Response from calling ``ReadRows`` may include row data, progress and throttling information. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: avro_rows (google.cloud.bigquery_storage_v1.types.AvroRows): Serialized row data in AVRO format. + This field is a member of `oneof`_ ``rows``. arrow_record_batch (google.cloud.bigquery_storage_v1.types.ArrowRecordBatch): Serialized row data in Arrow RecordBatch format. + This field is a member of `oneof`_ ``rows``. row_count (int): Number of serialized rows in the rows block. stats (google.cloud.bigquery_storage_v1.types.StreamStats): @@ -165,8 +174,10 @@ class ReadRowsResponse(proto.Message): status. 
avro_schema (google.cloud.bigquery_storage_v1.types.AvroSchema): Output only. Avro schema. + This field is a member of `oneof`_ ``schema``. arrow_schema (google.cloud.bigquery_storage_v1.types.ArrowSchema): Output only. Arrow schema. + This field is a member of `oneof`_ ``schema``. """ avro_rows = proto.Field( @@ -253,6 +264,9 @@ class AppendRowsRequest(proto.Message): the first request sent each time the gRPC network connection is opened/reopened. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: write_stream (str): Required. The write_stream identifies the target of the @@ -274,6 +288,7 @@ class AppendRowsRequest(proto.Message): AppendRows for the '_default' stream. proto_rows (google.cloud.bigquery_storage_v1.types.AppendRowsRequest.ProtoData): Rows in proto format. + This field is a member of `oneof`_ ``rows``. trace_id (str): Id set by client to annotate its identity. Only initial request setting is respected. @@ -312,9 +327,17 @@ class ProtoData(proto.Message): class AppendRowsResponse(proto.Message): r"""Response message for ``AppendRows``. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: append_result (google.cloud.bigquery_storage_v1.types.AppendRowsResponse.AppendResult): Result if the append is successful. + This field is a member of `oneof`_ ``response``. error (google.rpc.status_pb2.Status): Error returned when problems were encountered. If present, it indicates rows were not accepted into the system. Users @@ -339,6 +362,7 @@ class AppendRowsResponse(proto.Message): INTERNAL: Indicates server side error(s) that can be retried. + This field is a member of `oneof`_ ``response``. updated_schema (google.cloud.bigquery_storage_v1.types.TableSchema): If backend detects a schema update, pass it to user so that user can use it to input new diff --git a/google/cloud/bigquery_storage_v1/types/stream.py b/google/cloud/bigquery_storage_v1/types/stream.py index c49a8a3f..833f197f 100644 --- a/google/cloud/bigquery_storage_v1/types/stream.py +++ b/google/cloud/bigquery_storage_v1/types/stream.py @@ -37,6 +37,13 @@ class DataFormat(proto.Enum): class ReadSession(proto.Message): r"""Information about the ReadSession. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Output only. Unique identifier for the session, in the form @@ -50,8 +57,10 @@ class ReadSession(proto.Message): Immutable. Data format of the output data. avro_schema (google.cloud.bigquery_storage_v1.types.AvroSchema): Output only. Avro schema. + This field is a member of `oneof`_ ``schema``. arrow_schema (google.cloud.bigquery_storage_v1.types.ArrowSchema): Output only. Arrow schema. + This field is a member of `oneof`_ ``schema``. table (str): Immutable. Table that this ReadSession is reading from, in the form @@ -94,6 +103,8 @@ class TableModifiers(proto.Message): class TableReadOptions(proto.Message): r"""Options dictating how we read a table. + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: selected_fields (Sequence[str]): Names of the fields in the table that should be read. If @@ -114,6 +125,7 @@ class TableReadOptions(proto.Message): arrow_serialization_options (google.cloud.bigquery_storage_v1.types.ArrowSerializationOptions): Optional. Options specific to the Apache Arrow output format. + This field is a member of `oneof`_ ``output_format_serialization_options``. """ selected_fields = proto.RepeatedField(proto.STRING, number=1,) diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py index fbc7a25f..812b28d1 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.bigquery_storage_v1beta2.types import arrow from google.cloud.bigquery_storage_v1beta2.types import avro from google.cloud.bigquery_storage_v1beta2.types import storage @@ -170,12 +172,12 @@ def __init__( async def create_read_session( self, - request: storage.CreateReadSessionRequest = None, + request: Union[storage.CreateReadSessionRequest, dict] = None, *, parent: str = None, read_session: stream.ReadSession = None, max_stream_count: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.ReadSession: @@ -204,7 +206,7 @@ async def create_read_session( caller. Args: - request (:class:`google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest`): + request (Union[google.cloud.bigquery_storage_v1beta2.types.CreateReadSessionRequest, dict]): The request object. Request message for `CreateReadSession`. parent (:class:`str`): @@ -303,11 +305,11 @@ async def create_read_session( def read_rows( self, - request: storage.ReadRowsRequest = None, + request: Union[storage.ReadRowsRequest, dict] = None, *, read_stream: str = None, offset: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[storage.ReadRowsResponse]]: @@ -321,7 +323,7 @@ def read_rows( reflecting the current state of the stream. Args: - request (:class:`google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest`): + request (Union[google.cloud.bigquery_storage_v1beta2.types.ReadRowsRequest, dict]): The request object. Request message for `ReadRows`. read_stream (:class:`str`): Required. Stream to read rows from. 
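
# ===========================================================================
# Editorial sketch (not part of the patch): with the Union[..., dict] change,
# every RPC accepts either the proto message or a plain dict. A hypothetical
# caller (resource names are placeholders, and a session/stream is assumed
# to exist already):
from google.cloud.bigquery_storage_v1beta2.services.big_query_read import (
    BigQueryReadClient,
)

client = BigQueryReadClient()

# Dict requests are coerced into ReadRowsRequest by the generated client.
responses = client.read_rows(
    request={
        "read_stream": "projects/p/locations/l/sessions/s/streams/0",
        "offset": 0,
    }
)
for response in responses:
    # Row data arrives through the `rows` oneof documented above; WhichOneof
    # reports which member (avro_rows or arrow_record_batch) is populated.
    print(response._pb.WhichOneof("rows"), response.row_count)
# ===========================================================================
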
@@ -402,9 +404,9 @@ def read_rows( async def split_read_stream( self, - request: storage.SplitReadStreamRequest = None, + request: Union[storage.SplitReadStreamRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.SplitReadStreamResponse: @@ -423,7 +425,7 @@ async def split_read_stream( once the streams have been read to completion. Args: - request (:class:`google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest`): + request (Union[google.cloud.bigquery_storage_v1beta2.types.SplitReadStreamRequest, dict]): The request object. Request message for `SplitReadStream`. retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py index bfdd0ac0..005746ce 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.bigquery_storage_v1beta2.types import arrow from google.cloud.bigquery_storage_v1beta2.types import avro from google.cloud.bigquery_storage_v1beta2.types import storage @@ -393,7 +395,7 @@ def create_read_session( parent: str = None, read_session: stream.ReadSession = None, max_stream_count: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.ReadSession: @@ -515,7 +517,7 @@ def read_rows( *, read_stream: str = None, offset: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[storage.ReadRowsResponse]: @@ -603,7 +605,7 @@ def split_read_stream( self, request: Union[storage.SplitReadStreamRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.SplitReadStreamResponse: diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py index ba5b0694..55f9cb27 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -38,15 +37,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class BigQueryReadTransport(abc.ABC): """Abstract 
transport class for BigQueryRead.""" @@ -100,7 +90,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -133,29 +123,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py index f715be36..de854816 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py index 37f11c64..dd3da22f 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/async_client.py @@ -28,13 +28,15 @@ ) import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.bigquery_storage_v1beta2.types import storage from google.cloud.bigquery_storage_v1beta2.types import stream from google.cloud.bigquery_storage_v1beta2.types import table @@ -179,11 +181,11 @@ def __init__( async def create_write_stream( self, - request: storage.CreateWriteStreamRequest = None, + request: Union[storage.CreateWriteStreamRequest, dict] = None, *, parent: str = None, write_stream: stream.WriteStream = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: 
Sequence[Tuple[str, str]] = (), ) -> stream.WriteStream: @@ -196,7 +198,7 @@ async def create_write_stream( received. Args: - request (:class:`google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest`): + request (Union[google.cloud.bigquery_storage_v1beta2.types.CreateWriteStreamRequest, dict]): The request object. Request message for `CreateWriteStream`. parent (:class:`str`): @@ -279,7 +281,7 @@ def append_rows( self, requests: AsyncIterator[storage.AppendRowsRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[storage.AppendRowsResponse]]: @@ -349,17 +351,17 @@ def append_rows( async def get_write_stream( self, - request: storage.GetWriteStreamRequest = None, + request: Union[storage.GetWriteStreamRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.WriteStream: r"""Gets a write stream. Args: - request (:class:`google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest`): + request (Union[google.cloud.bigquery_storage_v1beta2.types.GetWriteStreamRequest, dict]): The request object. Request message for `GetWriteStreamRequest`. name (:class:`str`): @@ -431,10 +433,10 @@ async def get_write_stream( async def finalize_write_stream( self, - request: storage.FinalizeWriteStreamRequest = None, + request: Union[storage.FinalizeWriteStreamRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.FinalizeWriteStreamResponse: @@ -442,7 +444,7 @@ async def finalize_write_stream( the stream. Finalize is not supported on the '_default' stream. Args: - request (:class:`google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest`): + request (Union[google.cloud.bigquery_storage_v1beta2.types.FinalizeWriteStreamRequest, dict]): The request object. Request message for invoking `FinalizeWriteStream`. name (:class:`str`): @@ -511,10 +513,10 @@ async def finalize_write_stream( async def batch_commit_write_streams( self, - request: storage.BatchCommitWriteStreamsRequest = None, + request: Union[storage.BatchCommitWriteStreamsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.BatchCommitWriteStreamsResponse: @@ -525,7 +527,7 @@ async def batch_commit_write_streams( operations. Args: - request (:class:`google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest`): + request (Union[google.cloud.bigquery_storage_v1beta2.types.BatchCommitWriteStreamsRequest, dict]): The request object. Request message for `BatchCommitWriteStreams`. 
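
# ===========================================================================
# Editorial sketch (not part of the patch): how the write RPCs in this client
# fit together for a PENDING-type stream. Resource names are placeholders;
# the keyword arguments mirror the flattened parameters shown in the diff.
from google.cloud.bigquery_storage_v1beta2.services.big_query_write import (
    BigQueryWriteClient,
)
from google.cloud.bigquery_storage_v1beta2.types import stream as stream_types

client = BigQueryWriteClient()
parent = "projects/p/datasets/d/tables/t"

# 1. Create a PENDING stream; its rows stay invisible until the commit below.
ws = client.create_write_stream(
    parent=parent,
    write_stream=stream_types.WriteStream(
        type_=stream_types.WriteStream.Type.PENDING
    ),
)
# 2. append_rows(...) would stream AppendRowsRequest messages here.
# 3. Finalize to reject further appends, then commit atomically so the data
#    becomes available to readers.
client.finalize_write_stream(name=ws.name)
client.batch_commit_write_streams(
    request={"parent": parent, "write_stream_names": [ws.name]}
)
# ===========================================================================
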
parent (:class:`str`): @@ -595,10 +597,10 @@ async def batch_commit_write_streams( async def flush_rows( self, - request: storage.FlushRowsRequest = None, + request: Union[storage.FlushRowsRequest, dict] = None, *, write_stream: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.FlushRowsResponse: @@ -610,7 +612,7 @@ async def flush_rows( \_default stream, since it is not BUFFERED. Args: - request (:class:`google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest`): + request (Union[google.cloud.bigquery_storage_v1beta2.types.FlushRowsRequest, dict]): The request object. Request message for `FlushRows`. write_stream (:class:`str`): Required. The stream that is the diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py index 896b9a81..891274ac 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.bigquery_storage_v1beta2.types import storage from google.cloud.bigquery_storage_v1beta2.types import stream from google.cloud.bigquery_storage_v1beta2.types import table @@ -372,7 +374,7 @@ def create_write_stream( *, parent: str = None, write_stream: stream.WriteStream = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.WriteStream: @@ -457,7 +459,7 @@ def append_rows( self, requests: Iterator[storage.AppendRowsRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[storage.AppendRowsResponse]: @@ -516,7 +518,7 @@ def get_write_stream( request: Union[storage.GetWriteStreamRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> stream.WriteStream: @@ -588,7 +590,7 @@ def finalize_write_stream( request: Union[storage.FinalizeWriteStreamRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.FinalizeWriteStreamResponse: @@ -658,7 +660,7 @@ def batch_commit_write_streams( request: Union[storage.BatchCommitWriteStreamsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.BatchCommitWriteStreamsResponse: @@ -734,7 +736,7 @@ def flush_rows( request: Union[storage.FlushRowsRequest, dict] = None, *, write_stream: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> storage.FlushRowsResponse: diff --git 
a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py index 7c883bb0..cc2d9b35 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -38,15 +37,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class BigQueryWriteTransport(abc.ABC): """Abstract transport class for BigQueryWrite.""" @@ -100,7 +90,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -133,29 +123,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py index aafd2022..73b03a8e 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/google/cloud/bigquery_storage_v1beta2/types/storage.py b/google/cloud/bigquery_storage_v1beta2/types/storage.py index 00586f9a..b6814940 100644 --- a/google/cloud/bigquery_storage_v1beta2/types/storage.py +++ b/google/cloud/bigquery_storage_v1beta2/types/storage.py @@ -149,12 +149,21 @@ class ReadRowsResponse(proto.Message): r"""Response from calling ``ReadRows`` may include row data, progress and throttling information. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: avro_rows (google.cloud.bigquery_storage_v1beta2.types.AvroRows): Serialized row data in AVRO format. + This field is a member of `oneof`_ ``rows``. arrow_record_batch (google.cloud.bigquery_storage_v1beta2.types.ArrowRecordBatch): Serialized row data in Arrow RecordBatch format. + This field is a member of `oneof`_ ``rows``. row_count (int): Number of serialized rows in the rows block. stats (google.cloud.bigquery_storage_v1beta2.types.StreamStats): @@ -165,8 +174,10 @@ class ReadRowsResponse(proto.Message): status. avro_schema (google.cloud.bigquery_storage_v1beta2.types.AvroSchema): Output only. Avro schema. + This field is a member of `oneof`_ ``schema``. arrow_schema (google.cloud.bigquery_storage_v1beta2.types.ArrowSchema): Output only. Arrow schema. + This field is a member of `oneof`_ ``schema``. """ avro_rows = proto.Field( @@ -248,6 +259,8 @@ class CreateWriteStreamRequest(proto.Message): class AppendRowsRequest(proto.Message): r"""Request message for ``AppendRows``. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: write_stream (str): Required. The stream that is the target of the append @@ -265,6 +278,7 @@ class AppendRowsRequest(proto.Message): AppendRows for the '_default' stream. proto_rows (google.cloud.bigquery_storage_v1beta2.types.AppendRowsRequest.ProtoData): Rows in proto format. + This field is a member of `oneof`_ ``rows``. trace_id (str): Id set by client to annotate its identity. Only initial request setting is respected. @@ -295,9 +309,17 @@ class ProtoData(proto.Message): class AppendRowsResponse(proto.Message): r"""Response message for ``AppendRows``. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: append_result (google.cloud.bigquery_storage_v1beta2.types.AppendRowsResponse.AppendResult): Result if the append is successful. + This field is a member of `oneof`_ ``response``. error (google.rpc.status_pb2.Status): Error returned when problems were encountered. If present, it indicates rows were not accepted into the system. Users @@ -322,6 +344,7 @@ class AppendRowsResponse(proto.Message): INTERNAL: Indicates server side error(s) that can be retried. + This field is a member of `oneof`_ ``response``. updated_schema (google.cloud.bigquery_storage_v1beta2.types.TableSchema): If backend detects a schema update, pass it to user so that user can use it to input new diff --git a/google/cloud/bigquery_storage_v1beta2/types/stream.py b/google/cloud/bigquery_storage_v1beta2/types/stream.py index 9a658bba..6dfc250f 100644 --- a/google/cloud/bigquery_storage_v1beta2/types/stream.py +++ b/google/cloud/bigquery_storage_v1beta2/types/stream.py @@ -37,6 +37,13 @@ class DataFormat(proto.Enum): class ReadSession(proto.Message): r"""Information about the ReadSession. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Output only. Unique identifier for the session, in the form @@ -50,8 +57,10 @@ class ReadSession(proto.Message): Immutable. Data format of the output data. avro_schema (google.cloud.bigquery_storage_v1beta2.types.AvroSchema): Output only. Avro schema. + This field is a member of `oneof`_ ``schema``. arrow_schema (google.cloud.bigquery_storage_v1beta2.types.ArrowSchema): Output only. Arrow schema. + This field is a member of `oneof`_ ``schema``. table (str): Immutable. 
Table that this ReadSession is reading from, in the form diff --git a/setup.py b/setup.py index 1af3d1d8..a1ea2c1b 100644 --- a/setup.py +++ b/setup.py @@ -27,9 +27,8 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.26.0, <3.0.0dev", + "google-api-core[grpc] >= 1.28.0, <3.0.0dev", "proto-plus >= 1.18.0", - "packaging >= 14.3", "libcst >= 0.2.5", ] extras = { diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index eb1840b7..2b05babd 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,11 +5,9 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.26.0 +google-api-core==1.28.0 proto-plus==1.18.0 libcst==0.2.5 fastavro==0.21.2 pandas==0.21.1 pyarrow==0.15.0 -packaging==14.3 -google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is transitively required through google-api-core diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py index 23480a74..297e673f 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -37,9 +36,6 @@ ) from google.cloud.bigquery_storage_v1.services.big_query_read import BigQueryReadClient from google.cloud.bigquery_storage_v1.services.big_query_read import transports -from google.cloud.bigquery_storage_v1.services.big_query_read.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.bigquery_storage_v1.types import arrow from google.cloud.bigquery_storage_v1.types import avro from google.cloud.bigquery_storage_v1.types import storage @@ -49,20 +45,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
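
# ===========================================================================
# Editorial sketch (not part of the patch): the dependency floor moves to
# google-api-core >= 1.28.0, which transitively guarantees google-auth >=
# 1.25.0, so the library no longer needs `packaging` to probe versions at
# runtime. A minimal check of the installed floor, using pkg_resources as
# the surrounding modules already do:
import pkg_resources

dist = pkg_resources.get_distribution("google-api-core")
print(dist.version)  # expected to satisfy ">= 1.28.0, < 3.0.0dev" per setup.py
# ===========================================================================
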
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -210,7 +192,7 @@ def test_big_query_read_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -227,7 +209,7 @@ def test_big_query_read_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -244,7 +226,7 @@ def test_big_query_read_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,7 +255,7 @@ def test_big_query_read_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -330,7 +312,7 @@ def test_big_query_read_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -372,7 +354,7 @@ def test_big_query_read_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -394,7 +376,7 @@ def test_big_query_read_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -425,7 +407,7 @@ def test_big_query_read_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -456,7 +438,7 @@ def test_big_query_read_client_client_options_credentials_file( options = 
client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -1181,7 +1163,6 @@ def test_big_query_read_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_big_query_read_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1206,30 +1187,6 @@ def test_big_query_read_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_big_query_read_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.bigquery_storage_v1.services.big_query_read.transports.BigQueryReadTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BigQueryReadTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_big_query_read_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1241,7 +1198,6 @@ def test_big_query_read_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_big_query_read_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1258,22 +1214,6 @@ def test_big_query_read_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_big_query_read_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BigQueryReadClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -1281,7 +1221,6 @@ def test_big_query_read_auth_adc_old_google_auth(): transports.BigQueryReadGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_big_query_read_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
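
# ===========================================================================
# Editorial sketch (not part of the patch): the shape of the ADC tests kept
# above. google.auth.default is patched so no real credentials are needed,
# and the assertion pins the scope keywords the simplified transport now
# always sends. The helper name is hypothetical.
import mock
import google.auth
from google.auth import credentials as ga_credentials


def check_adc_scopes(transport_class, expected_scopes):
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus")
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=expected_scopes,
            quota_project_id="octopus",
        )
# ===========================================================================
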
@@ -1299,30 +1238,6 @@ def test_big_query_read_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.BigQueryReadGrpcTransport, - transports.BigQueryReadGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_big_query_read_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py index a6b1f1f8..651ffb41 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -39,9 +38,6 @@ BigQueryWriteClient, ) from google.cloud.bigquery_storage_v1.services.big_query_write import transports -from google.cloud.bigquery_storage_v1.services.big_query_write.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.bigquery_storage_v1.types import protobuf from google.cloud.bigquery_storage_v1.types import storage from google.cloud.bigquery_storage_v1.types import stream @@ -54,20 +50,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -224,7 +206,7 @@ def test_big_query_write_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -241,7 +223,7 @@ def test_big_query_write_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,7 +240,7 @@ def test_big_query_write_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -287,7 +269,7 @@ def test_big_query_write_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -346,7 +328,7 @@ def test_big_query_write_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -388,7 +370,7 @@ def test_big_query_write_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -410,7 +392,7 @@ def test_big_query_write_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -441,7 +423,7 @@ def test_big_query_write_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -472,7 +454,7 @@ def test_big_query_write_client_client_options_credentials_file( options = 
client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -1750,7 +1732,6 @@ def test_big_query_write_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_big_query_write_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1775,30 +1756,6 @@ def test_big_query_write_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_big_query_write_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.bigquery_storage_v1.services.big_query_write.transports.BigQueryWriteTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BigQueryWriteTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.insertdata", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_big_query_write_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1810,7 +1767,6 @@ def test_big_query_write_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_big_query_write_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1827,22 +1783,6 @@ def test_big_query_write_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_big_query_write_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BigQueryWriteClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.insertdata", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -1850,7 +1790,6 @@ def test_big_query_write_auth_adc_old_google_auth(): transports.BigQueryWriteGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_big_query_write_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
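The recurring "client = client_class(transport=transport_name, ...)" edits pin every run of these tests to the transport it was parametrized with, rather than letting the client pick a default. A sketch of the parametrization the edits assume, using the write-service names imported above (the fixture wiring is condensed for illustration):

import mock
import pytest

from google.cloud.bigquery_storage_v1.services.big_query_write import (
    BigQueryWriteAsyncClient,
    BigQueryWriteClient,
    transports,
)


@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (BigQueryWriteClient, transports.BigQueryWriteGrpcTransport, "grpc"),
        (
            BigQueryWriteAsyncClient,
            transports.BigQueryWriteGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_client_pins_transport(client_class, transport_class, transport_name):
    # Patching __init__ lets us assert which transport class the client
    # resolves from transport_name without opening a real channel.
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client_class(transport=transport_name)
        patched.assert_called_once()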
@@ -1868,30 +1807,6 @@ def test_big_query_write_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.BigQueryWriteGrpcTransport, - transports.BigQueryWriteGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_big_query_write_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.insertdata", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py index c519ab6e..6b290ab5 100644 --- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -39,9 +38,6 @@ BigQueryReadClient, ) from google.cloud.bigquery_storage_v1beta2.services.big_query_read import transports -from google.cloud.bigquery_storage_v1beta2.services.big_query_read.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.bigquery_storage_v1beta2.types import arrow from google.cloud.bigquery_storage_v1beta2.types import avro from google.cloud.bigquery_storage_v1beta2.types import storage @@ -51,20 +47,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -212,7 +194,7 @@ def test_big_query_read_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,7 +211,7 @@ def test_big_query_read_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -246,7 +228,7 @@ def test_big_query_read_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -275,7 +257,7 @@ def test_big_query_read_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -332,7 +314,7 @@ def test_big_query_read_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -374,7 +356,7 @@ def test_big_query_read_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -396,7 +378,7 @@ def test_big_query_read_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -427,7 +409,7 @@ def test_big_query_read_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -458,7 +440,7 @@ def test_big_query_read_client_client_options_credentials_file( options = 
client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -1179,7 +1161,6 @@ def test_big_query_read_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_big_query_read_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1204,30 +1185,6 @@ def test_big_query_read_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_big_query_read_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.bigquery_storage_v1beta2.services.big_query_read.transports.BigQueryReadTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BigQueryReadTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_big_query_read_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1239,7 +1196,6 @@ def test_big_query_read_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_big_query_read_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1256,22 +1212,6 @@ def test_big_query_read_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_big_query_read_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BigQueryReadClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -1279,7 +1219,6 @@ def test_big_query_read_auth_adc_old_google_auth(): transports.BigQueryReadGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_big_query_read_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
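The mtls_env_auto hunks above drive endpoint selection through the GOOGLE_API_USE_MTLS_ENDPOINT environment variable, with mock.patch.dict scoping each value to a single test. A condensed sketch of the "never" case for the v1beta2 read client; DEFAULT_ENDPOINT and the host keyword follow the generated clients, and the kwargs inspection is simplified here:

import os

import mock

from google.cloud.bigquery_storage_v1beta2.services.big_query_read import (
    BigQueryReadClient,
    transports,
)


def test_mtls_endpoint_never():
    # With "never", the client must stay on the plain API endpoint even
    # if client certificates are available.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(
            transports.BigQueryReadGrpcTransport, "__init__"
        ) as patched:
            patched.return_value = None
            BigQueryReadClient(transport="grpc")
            _, kwargs = patched.call_args
            assert kwargs["host"] == BigQueryReadClient.DEFAULT_ENDPOINT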
@@ -1297,30 +1236,6 @@ def test_big_query_read_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.BigQueryReadGrpcTransport, - transports.BigQueryReadGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_big_query_read_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.readonly", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py index df48cf45..13159bce 100644 --- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -39,9 +38,6 @@ BigQueryWriteClient, ) from google.cloud.bigquery_storage_v1beta2.services.big_query_write import transports -from google.cloud.bigquery_storage_v1beta2.services.big_query_write.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.bigquery_storage_v1beta2.types import protobuf from google.cloud.bigquery_storage_v1beta2.types import storage from google.cloud.bigquery_storage_v1beta2.types import stream @@ -54,20 +50,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -224,7 +206,7 @@ def test_big_query_write_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -241,7 +223,7 @@ def test_big_query_write_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,7 +240,7 @@ def test_big_query_write_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -287,7 +269,7 @@ def test_big_query_write_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -346,7 +328,7 @@ def test_big_query_write_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -388,7 +370,7 @@ def test_big_query_write_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -410,7 +392,7 @@ def test_big_query_write_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -441,7 +423,7 @@ def test_big_query_write_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -472,7 +454,7 @@ def test_big_query_write_client_client_options_credentials_file( options = 
client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -1750,7 +1732,6 @@ def test_big_query_write_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_big_query_write_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1775,30 +1756,6 @@ def test_big_query_write_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_big_query_write_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.bigquery_storage_v1beta2.services.big_query_write.transports.BigQueryWriteTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BigQueryWriteTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.insertdata", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_big_query_write_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1810,7 +1767,6 @@ def test_big_query_write_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_big_query_write_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1827,22 +1783,6 @@ def test_big_query_write_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_big_query_write_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BigQueryWriteClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.insertdata", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -1850,7 +1790,6 @@ def test_big_query_write_auth_adc_old_google_auth(): transports.BigQueryWriteGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_big_query_write_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
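With the old-google-auth variants gone, credentials-file loading is exercised once per transport base class. A minimal sketch of the surviving pattern for the v1beta2 write transport, reusing the patch targets of the deleted test; the generated test additionally pins the exact scope and quota_project_id keywords:

import mock

import google.auth
from google.auth import credentials as ga_credentials
from google.cloud.bigquery_storage_v1beta2.services.big_query_write import transports


def test_transport_credentials_file():
    # The base transport should hand the credentials file (and its quota
    # project) straight to google.auth.load_credentials_from_file.
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.bigquery_storage_v1beta2.services.big_query_write."
        "transports.BigQueryWriteTransport._prep_wrapped_messages"
    ) as prep:
        prep.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transports.BigQueryWriteTransport(
            credentials_file="credentials.json", quota_project_id="octopus"
        )
        load_creds.assert_called_once()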
@@ -1868,30 +1807,6 @@ def test_big_query_write_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.BigQueryWriteGrpcTransport, - transports.BigQueryWriteGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_big_query_write_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/bigquery", - "https://www.googleapis.com/auth/bigquery.insertdata", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [