From fbbb439b8c77fa9367a4b5bea725dd0b0f26b769 Mon Sep 17 00:00:00 2001
From: Yoshi Automation Bot
Date: Wed, 21 Oct 2020 09:48:03 -0700
Subject: [PATCH] feat: add public transport property and path formatting
 methods to client (#80)

This PR was generated using Autosynth. :rainbow:

Synth log will be available here:
https://source.cloud.google.com/results/invocations/fd138ecd-79c4-4b01-b00c-a340f46edbee/targets

- [ ] To automatically regenerate this PR, check this box.

PiperOrigin-RevId: 338157137
Source-Link: https://github.com/googleapis/googleapis/commit/c7331b75b0b7bbd614373b7d37085db1c80dd4be
PiperOrigin-RevId: 338118656
Source-Link: https://github.com/googleapis/googleapis/commit/7e400b0d3a810afe0f28226306eab2ba905df16b
---
 .../services/big_query_read/async_client.py |  50 +++-
 .../services/big_query_read/client.py       |  92 ++++++-
 .../big_query_read/transports/base.py       |   4 +-
 .../big_query_read/transports/grpc.py       |  14 +-
 .../bigquery_storage_v1/types/storage.py    |   4 +-
 synth.metadata                              |   6 +-
 .../test_big_query_read.py                  | 235 ++++++++++++++----
 7 files changed, 333 insertions(+), 72 deletions(-)

diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py
index f5c80cd0..7108ffd0 100644
--- a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py
+++ b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py
@@ -18,7 +18,7 @@
 from collections import OrderedDict
 import functools
 import re
-from typing import Dict, AsyncIterable, Sequence, Tuple, Type, Union
+from typing import Dict, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union
 import pkg_resources
 
 import google.api_core.client_options as ClientOptions  # type: ignore
@@ -53,10 +53,46 @@ class BigQueryReadAsyncClient:
     parse_read_session_path = staticmethod(BigQueryReadClient.parse_read_session_path)
     read_stream_path = staticmethod(BigQueryReadClient.read_stream_path)
     parse_read_stream_path = staticmethod(BigQueryReadClient.parse_read_stream_path)
+    table_path = staticmethod(BigQueryReadClient.table_path)
+    parse_table_path = staticmethod(BigQueryReadClient.parse_table_path)
+
+    common_billing_account_path = staticmethod(
+        BigQueryReadClient.common_billing_account_path
+    )
+    parse_common_billing_account_path = staticmethod(
+        BigQueryReadClient.parse_common_billing_account_path
+    )
+
+    common_folder_path = staticmethod(BigQueryReadClient.common_folder_path)
+    parse_common_folder_path = staticmethod(BigQueryReadClient.parse_common_folder_path)
+
+    common_organization_path = staticmethod(BigQueryReadClient.common_organization_path)
+    parse_common_organization_path = staticmethod(
+        BigQueryReadClient.parse_common_organization_path
+    )
+
+    common_project_path = staticmethod(BigQueryReadClient.common_project_path)
+    parse_common_project_path = staticmethod(
+        BigQueryReadClient.parse_common_project_path
+    )
+
+    common_location_path = staticmethod(BigQueryReadClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        BigQueryReadClient.parse_common_location_path
+    )
 
     from_service_account_file = BigQueryReadClient.from_service_account_file
     from_service_account_json = from_service_account_file
 
+    @property
+    def transport(self) -> BigQueryReadTransport:
+        """Return the transport used by the client instance.
+
+        Returns:
+            BigQueryReadTransport: The transport used by the client instance.
+ """ + return self._client.transport + get_transport_class = functools.partial( type(BigQueryReadClient).get_transport_class, type(BigQueryReadClient) ) @@ -191,7 +227,8 @@ async def create_read_session( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, read_session, max_stream_count]): + has_flattened_params = any([parent, read_session, max_stream_count]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -218,7 +255,7 @@ async def create_read_session( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, @@ -248,7 +285,7 @@ def read_rows( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> AsyncIterable[storage.ReadRowsResponse]: + ) -> Awaitable[AsyncIterable[storage.ReadRowsResponse]]: r"""Reads rows from the stream in the format prescribed by the ReadSession. Each response contains one or more table rows, up to a maximum of 100 MiB per response; @@ -291,7 +328,8 @@ def read_rows( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([read_stream, offset]): + has_flattened_params = any([read_stream, offset]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -385,7 +423,7 @@ async def split_read_stream( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py index f60e862d..3f04760f 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py @@ -133,6 +133,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> BigQueryReadTransport: + """Return the transport used by the client instance. + + Returns: + BigQueryReadTransport: The transport used by the client instance. 
+ """ + return self._transport + @staticmethod def read_session_path(project: str, location: str, session: str,) -> str: """Return a fully-qualified read_session string.""" @@ -167,6 +176,81 @@ def parse_read_stream_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def table_path(project: str, dataset: str, table: str,) -> str: + """Return a fully-qualified table string.""" + return "projects/{project}/datasets/{dataset}/tables/{table}".format( + project=project, dataset=dataset, table=table, + ) + + @staticmethod + def parse_table_path(path: str) -> Dict[str, str]: + """Parse a table path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/datasets/(?P.+?)/tables/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, @@ -202,10 +286,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
-                Generally, you only need to set this if you're developing
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
                 your own client library.
 
         Raises:
diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py
index 5727ca5a..4497158f 100644
--- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py
+++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py
@@ -118,7 +118,7 @@ def _prep_wrapped_messages(self, client_info):
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+                    exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
                 ),
             ),
             default_timeout=600.0,
@@ -142,7 +142,7 @@ def _prep_wrapped_messages(self, client_info):
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+                    exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
                 ),
             ),
             default_timeout=600.0,
diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py
index 36377f1d..547954b1 100644
--- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py
+++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py
@@ -91,10 +91,10 @@ def __init__(
                 for grpc channel. It is ignored if ``channel`` is provided.
             quota_project_id (Optional[str]): An optional project to use for billing
                 and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
                 your own client library.
 
         Raises:
@@ -231,12 +231,8 @@ def create_channel(
 
     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Create the channel designed to connect to this service.
-
-        This property caches on the instance; repeated calls return
-        the same channel.
+        """Return the channel designed to connect to this service.
         """
-        # Return the channel from cache.
         return self._grpc_channel
 
     @property
diff --git a/google/cloud/bigquery_storage_v1/types/storage.py b/google/cloud/bigquery_storage_v1/types/storage.py
index 3460dce7..1b9c9d35 100644
--- a/google/cloud/bigquery_storage_v1/types/storage.py
+++ b/google/cloud/bigquery_storage_v1/types/storage.py
@@ -166,9 +166,9 @@ class ReadRowsResponse(proto.Message):
 
     row_count = proto.Field(proto.INT64, number=6)
 
-    stats = proto.Field(proto.MESSAGE, number=2, message=StreamStats,)
+    stats = proto.Field(proto.MESSAGE, number=2, message="StreamStats",)
 
-    throttle_state = proto.Field(proto.MESSAGE, number=5, message=ThrottleState,)
+    throttle_state = proto.Field(proto.MESSAGE, number=5, message="ThrottleState",)
 
 
 class SplitReadStreamRequest(proto.Message):
diff --git a/synth.metadata b/synth.metadata
index 19f14807..46d777aa 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -4,15 +4,15 @@
       "git": {
         "name": ".",
         "remote": "https://github.com/googleapis/python-bigquery-storage.git",
-        "sha": "a7fe7626312a5b9fe1e7bd0e0fe5601ae97605c7"
+        "sha": "e290752ee4e771ebda01c2756b7631b40c4e1c5a"
       }
     },
     {
       "git": {
         "name": "googleapis",
         "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "062f46f246c78fde2160524db593fa0fa7bdbe64",
-        "internalRef": "337404700"
+        "sha": "c7331b75b0b7bbd614373b7d37085db1c80dd4be",
+        "internalRef": "338157137"
       }
     },
     {
diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py
index a86692f3..51286b52 100644
--- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py
+++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py
@@ -94,12 +94,12 @@ def test_big_query_read_client_from_service_account_file(client_class):
     ) as factory:
         factory.return_value = creds
         client = client_class.from_service_account_file("dummy/file/path.json")
-        assert client._transport._credentials == creds
+        assert client.transport._credentials == creds
 
         client = client_class.from_service_account_json("dummy/file/path.json")
-        assert client._transport._credentials == creds
+        assert client.transport._credentials == creds
 
-    assert client._transport._host == "bigquerystorage.googleapis.com:443"
+    assert client.transport._host == "bigquerystorage.googleapis.com:443"
 
 
 def test_big_query_read_client_get_transport_class():
@@ -444,7 +444,7 @@ def test_create_read_session(
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.create_read_session), "__call__"
+        type(client.transport.create_read_session), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = stream.ReadSession(
            name="name_value",
@@ -463,6 +463,7 @@
         assert args[0] == storage.CreateReadSessionRequest()
 
     # Establish that the response is the type that we expect.
+
     assert isinstance(response, stream.ReadSession)
 
     assert response.name == "name_value"
@@ -477,18 +478,20 @@ def test_create_read_session_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_create_read_session_async(transport: str = "grpc_asyncio"):
+async def test_create_read_session_async(
+    transport: str = "grpc_asyncio", request_type=storage.CreateReadSessionRequest
+):
     client = BigQueryReadAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = storage.CreateReadSessionRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.create_read_session), "__call__"
+        type(client.transport.create_read_session), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -505,7 +508,7 @@
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0] == request
+        assert args[0] == storage.CreateReadSessionRequest()
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, stream.ReadSession)
 
@@ -517,6 +520,11 @@
     assert response.table == "table_value"
 
 
+@pytest.mark.asyncio
+async def test_create_read_session_async_from_dict():
+    await test_create_read_session_async(request_type=dict)
+
+
 def test_create_read_session_field_headers():
     client = BigQueryReadClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -527,7 +535,7 @@
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.create_read_session), "__call__"
+        type(client.transport.create_read_session), "__call__"
     ) as call:
         call.return_value = stream.ReadSession()
 
@@ -557,7 +565,7 @@
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.create_read_session), "__call__"
+        type(client.transport.create_read_session), "__call__"
     ) as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(stream.ReadSession())
 
@@ -581,7 +589,7 @@ def test_create_read_session_flattened():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.create_read_session), "__call__"
+        type(client.transport.create_read_session), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = stream.ReadSession()
 
@@ -626,7 +634,7 @@
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.create_read_session), "__call__"
+        type(client.transport.create_read_session), "__call__"
    ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = stream.ReadSession()
 
@@ -677,7 +685,7 @@ def test_read_rows(transport: str = "grpc", request_type=storage.ReadRowsRequest
     request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.read_rows), "__call__") as call:
+    with mock.patch.object(type(client.transport.read_rows), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = iter([storage.ReadRowsResponse()])
 
@@ -699,19 +707,19 @@ def test_read_rows_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_read_rows_async(transport: str = "grpc_asyncio"):
+async def test_read_rows_async(
+    transport: str = "grpc_asyncio", request_type=storage.ReadRowsRequest
+):
     client = BigQueryReadAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = storage.ReadRowsRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.read_rows), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.read_rows), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
         call.return_value.read = mock.AsyncMock(
             side_effect=[storage.ReadRowsResponse()]
@@ -724,13 +732,18 @@
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0] == request
+        assert args[0] == storage.ReadRowsRequest()
 
     # Establish that the response is the type that we expect.
     message = await response.read()
     assert isinstance(message, storage.ReadRowsResponse)
 
 
+@pytest.mark.asyncio
+async def test_read_rows_async_from_dict():
+    await test_read_rows_async(request_type=dict)
+
+
 def test_read_rows_field_headers():
     client = BigQueryReadClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -740,7 +753,7 @@
     request.read_stream = "read_stream/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.read_rows), "__call__") as call:
+    with mock.patch.object(type(client.transport.read_rows), "__call__") as call:
         call.return_value = iter([storage.ReadRowsResponse()])
 
         client.read_rows(request)
@@ -765,9 +778,7 @@
     request.read_stream = "read_stream/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.read_rows), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.read_rows), "__call__") as call:
         call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
         call.return_value.read = mock.AsyncMock(
             side_effect=[storage.ReadRowsResponse()]
@@ -789,7 +800,7 @@ def test_read_rows_flattened():
     client = BigQueryReadClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.read_rows), "__call__") as call:
+    with mock.patch.object(type(client.transport.read_rows), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = iter([storage.ReadRowsResponse()])
 
@@ -825,9 +836,7 @@
     client = BigQueryReadAsyncClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.read_rows), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.read_rows), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = iter([storage.ReadRowsResponse()])
 
@@ -871,7 +880,7 @@ def test_split_read_stream(
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.split_read_stream), "__call__"
+        type(client.transport.split_read_stream), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = storage.SplitReadStreamResponse()
 
@@ -885,6 +894,7 @@
         assert args[0] == storage.SplitReadStreamRequest()
 
     # Establish that the response is the type that we expect.
+
     assert isinstance(response, storage.SplitReadStreamResponse)
 
 
@@ -893,18 +903,20 @@ def test_split_read_stream_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_split_read_stream_async(transport: str = "grpc_asyncio"):
+async def test_split_read_stream_async(
+    transport: str = "grpc_asyncio", request_type=storage.SplitReadStreamRequest
+):
     client = BigQueryReadAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = storage.SplitReadStreamRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.split_read_stream), "__call__"
+        type(client.transport.split_read_stream), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -917,12 +929,17 @@
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
-        assert args[0] == request
+        assert args[0] == storage.SplitReadStreamRequest()
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, storage.SplitReadStreamResponse)
 
 
+@pytest.mark.asyncio
+async def test_split_read_stream_async_from_dict():
+    await test_split_read_stream_async(request_type=dict)
+
+
 def test_split_read_stream_field_headers():
     client = BigQueryReadClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -933,7 +950,7 @@
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.split_read_stream), "__call__"
+        type(client.transport.split_read_stream), "__call__"
     ) as call:
         call.return_value = storage.SplitReadStreamResponse()
 
@@ -960,7 +977,7 @@
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.split_read_stream), "__call__"
+        type(client.transport.split_read_stream), "__call__"
     ) as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             storage.SplitReadStreamResponse()
         )
@@ -1014,7 +1031,7 @@ def test_transport_instance():
         credentials=credentials.AnonymousCredentials(),
     )
     client = BigQueryReadClient(transport=transport)
-    assert client._transport is transport
+    assert client.transport is transport
 
 
 def test_transport_get_channel():
@@ -1047,7 +1064,7 @@ def test_transport_adc(transport_class):
 
 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
     client = BigQueryReadClient(credentials=credentials.AnonymousCredentials(),)
-    assert isinstance(client._transport, transports.BigQueryReadGrpcTransport,)
+    assert isinstance(client.transport, transports.BigQueryReadGrpcTransport,)
 
 
 def test_big_query_read_base_transport_error():
@@ -1155,7 +1172,7 @@ def test_big_query_read_host_no_port():
             api_endpoint="bigquerystorage.googleapis.com"
         ),
     )
-    assert client._transport._host == "bigquerystorage.googleapis.com:443"
+    assert client.transport._host == "bigquerystorage.googleapis.com:443"
 
 
 def test_big_query_read_host_with_port():
@@ -1165,7 +1182,7 @@
             api_endpoint="bigquerystorage.googleapis.com:8000"
         ),
    )
-    assert client._transport._host == "bigquerystorage.googleapis.com:8000"
+    assert client.transport._host == "bigquerystorage.googleapis.com:8000"
 
 
 def test_big_query_read_grpc_transport_channel():
@@ -1311,10 +1328,10 @@ def test_parse_read_session_path():
 
 
 def test_read_stream_path():
-    project = "squid"
-    location = "clam"
-    session = "whelk"
-    stream = "octopus"
+    project = "cuttlefish"
+    location = "mussel"
+    session = "winkle"
+    stream = "nautilus"
 
     expected = "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}".format(
         project=project, location=location, session=session, stream=stream,
     )
@@ -1325,10 +1342,10 @@
 
 def test_parse_read_stream_path():
     expected = {
-        "project": "oyster",
-        "location": "nudibranch",
-        "session": "cuttlefish",
-        "stream": "mussel",
+        "project": "scallop",
+        "location": "abalone",
+        "session": "squid",
+        "stream": "clam",
     }
     path = BigQueryReadClient.read_stream_path(**expected)
 
     # Check that the path construction is reversible.
     actual = BigQueryReadClient.parse_read_stream_path(path)
@@ -1337,6 +1354,132 @@ def test_parse_read_stream_path():
     assert expected == actual
 
 
+def test_table_path():
+    project = "whelk"
+    dataset = "octopus"
+    table = "oyster"
+
+    expected = "projects/{project}/datasets/{dataset}/tables/{table}".format(
+        project=project, dataset=dataset, table=table,
+    )
+    actual = BigQueryReadClient.table_path(project, dataset, table)
+    assert expected == actual
+
+
+def test_parse_table_path():
+    expected = {
+        "project": "nudibranch",
+        "dataset": "cuttlefish",
+        "table": "mussel",
+    }
+    path = BigQueryReadClient.table_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = BigQueryReadClient.parse_table_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "winkle"
+
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = BigQueryReadClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "nautilus",
+    }
+    path = BigQueryReadClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = BigQueryReadClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "scallop"
+
+    expected = "folders/{folder}".format(folder=folder,)
+    actual = BigQueryReadClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "abalone",
+    }
+    path = BigQueryReadClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = BigQueryReadClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "squid"
+
+    expected = "organizations/{organization}".format(organization=organization,)
+    actual = BigQueryReadClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "clam",
+    }
+    path = BigQueryReadClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = BigQueryReadClient.parse_common_organization_path(path)
+    assert expected == actual
+
+
+def test_common_project_path():
+    project = "whelk"
+
+    expected = "projects/{project}".format(project=project,)
+    actual = BigQueryReadClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "octopus",
+    }
+    path = BigQueryReadClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = BigQueryReadClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    project = "oyster"
+    location = "nudibranch"
+
+    expected = "projects/{project}/locations/{location}".format(
+        project=project, location=location,
+    )
+    actual = BigQueryReadClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "cuttlefish",
+        "location": "mussel",
+    }
+    path = BigQueryReadClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = BigQueryReadClient.parse_common_location_path(path)
+    assert expected == actual
+
+
 def test_client_withDEFAULT_CLIENT_INFO():
     client_info = gapic_v1.client_info.ClientInfo()
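---

For reviewers, a minimal usage sketch of the surface this PR adds: the public read-only
`transport` property and the `table_path`/`parse_table_path` helpers. This snippet is not
part of the diff above; it assumes a google-cloud-bigquery-storage install that includes
this change, and the project, dataset, and table names are placeholders.

    # Sketch only; import path mirrors the one used by the unit tests above.
    from google.cloud.bigquery_storage_v1.services.big_query_read import (
        BigQueryReadClient,
    )

    # The path helpers are static methods, so no credentials are required.
    table = BigQueryReadClient.table_path("my-project", "my_dataset", "my_table")
    assert table == "projects/my-project/datasets/my_dataset/tables/my_table"

    # parse_table_path inverts table_path into its component segments.
    assert BigQueryReadClient.parse_table_path(table) == {
        "project": "my-project",
        "dataset": "my_dataset",
        "table": "my_table",
    }

    # The public `transport` property replaces reaching into the private
    # `_transport` attribute, as the updated tests above now do:
    # client = BigQueryReadClient()   # requires credentials, so commented out
    # host = client.transport._host   # e.g. "bigquerystorage.googleapis.com:443"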