From 672d8218d27238bfbe7443355accebde6e9ae6da Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 23 Dec 2020 14:59:55 -0800 Subject: [PATCH] feat: add common resource path helpers; expose client transport; remove gRPC send/recv limit (#12) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * feat: Add Java microgen rules to imports Source-Author: Mira Leung Source-Date: Mon Sep 21 18:04:02 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: aaac658367398e478d650768344b88acebad50d9 Source-Link: https://github.com/googleapis/googleapis/commit/aaac658367398e478d650768344b88acebad50d9 * feat: added ReportInventory RPC PiperOrigin-RevId: 333583499 Source-Author: Google APIs Source-Date: Thu Sep 24 13:18:54 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: 2987612c6aacc1857ee35468e7aeb1c393460799 Source-Link: https://github.com/googleapis/googleapis/commit/2987612c6aacc1857ee35468e7aeb1c393460799 * feat: Added force option for Purge APIs feat: Added API service description fix!: Renamed ProductLevelConfig enum names in CatalogService BREAKING CHANGE: the renaming is a breaking change but since the API is not used anywhere and client libraries generation has not started, it's a safe change. fix: a few minor API doc changes. PiperOrigin-RevId: 339268186 Source-Author: Google APIs Source-Date: Tue Oct 27 09:45:44 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: 6516b525ee76094f1de9b7a8b0abaff91f2e5eb2 Source-Link: https://github.com/googleapis/googleapis/commit/6516b525ee76094f1de9b7a8b0abaff91f2e5eb2 * chore: upgrade to gapic-generator 0.35.9 PiperOrigin-RevId: 339292950 Source-Author: Google APIs Source-Date: Tue Oct 27 11:32:46 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: 07d41a7e5cade45aba6f0d277c89722b48f2c956 Source-Link: https://github.com/googleapis/googleapis/commit/07d41a7e5cade45aba6f0d277c89722b48f2c956 * fix: remove client recv msg limit fix: add enums to `types/__init__.py` PiperOrigin-RevId: 347055288 Source-Author: Google APIs Source-Date: Fri Dec 11 12:44:37 2020 -0800 Source-Repo: googleapis/googleapis Source-Sha: dd372aa22ded7a8ba6f0e03a80e06358a3fa0907 Source-Link: https://github.com/googleapis/googleapis/commit/dd372aa22ded7a8ba6f0e03a80e06358a3fa0907 --- docs/executions_v1beta/types.rst | 1 + docs/workflows_v1beta/types.rst | 1 + .../services/executions/async_client.py | 46 ++- .../services/executions/client.py | 92 +++++- .../executions/transports/__init__.py | 1 - .../services/executions/transports/grpc.py | 28 +- .../executions/transports/grpc_asyncio.py | 12 + .../executions_v1beta/types/__init__.py | 3 +- .../executions_v1beta/types/executions.py | 4 +- .../services/workflows/async_client.py | 47 ++- .../services/workflows/client.py | 76 ++++- .../services/workflows/transports/__init__.py | 1 - .../services/workflows/transports/grpc.py | 37 ++- .../workflows/transports/grpc_asyncio.py | 19 +- .../cloud/workflows_v1beta/types/__init__.py | 1 - .../cloud/workflows_v1beta/types/workflows.py | 6 +- noxfile.py | 2 +- synth.metadata | 6 +- .../executions_v1beta/test_executions.py | 303 +++++++++++++----- .../gapic/workflows_v1beta/test_workflows.py | 299 +++++++++++------ 20 files changed, 748 insertions(+), 237 deletions(-) diff --git a/docs/executions_v1beta/types.rst b/docs/executions_v1beta/types.rst index 7095ac2..6c92f22 100644 --- a/docs/executions_v1beta/types.rst +++ b/docs/executions_v1beta/types.rst @@ 
-3,3 +3,4 @@ Types for Google Cloud Workflows Executions v1beta API .. automodule:: google.cloud.workflows.executions_v1beta.types :members: + :show-inheritance: diff --git a/docs/workflows_v1beta/types.rst b/docs/workflows_v1beta/types.rst index 96b8ad2..0b7a857 100644 --- a/docs/workflows_v1beta/types.rst +++ b/docs/workflows_v1beta/types.rst @@ -3,3 +3,4 @@ Types for Google Cloud Workflows v1beta API .. automodule:: google.cloud.workflows_v1beta.types :members: + :show-inheritance: diff --git a/google/cloud/workflows/executions_v1beta/services/executions/async_client.py b/google/cloud/workflows/executions_v1beta/services/executions/async_client.py index 801decb..2fc2efe 100644 --- a/google/cloud/workflows/executions_v1beta/services/executions/async_client.py +++ b/google/cloud/workflows/executions_v1beta/services/executions/async_client.py @@ -50,10 +50,44 @@ class ExecutionsAsyncClient: execution_path = staticmethod(ExecutionsClient.execution_path) parse_execution_path = staticmethod(ExecutionsClient.parse_execution_path) + workflow_path = staticmethod(ExecutionsClient.workflow_path) + parse_workflow_path = staticmethod(ExecutionsClient.parse_workflow_path) + + common_billing_account_path = staticmethod( + ExecutionsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ExecutionsClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(ExecutionsClient.common_folder_path) + parse_common_folder_path = staticmethod(ExecutionsClient.parse_common_folder_path) + + common_organization_path = staticmethod(ExecutionsClient.common_organization_path) + parse_common_organization_path = staticmethod( + ExecutionsClient.parse_common_organization_path + ) + + common_project_path = staticmethod(ExecutionsClient.common_project_path) + parse_common_project_path = staticmethod(ExecutionsClient.parse_common_project_path) + + common_location_path = staticmethod(ExecutionsClient.common_location_path) + parse_common_location_path = staticmethod( + ExecutionsClient.parse_common_location_path + ) from_service_account_file = ExecutionsClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> ExecutionsTransport: + """Return the transport used by the client instance. + + Returns: + ExecutionsTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(ExecutionsClient).get_transport_class, type(ExecutionsClient) ) @@ -154,7 +188,8 @@ async def list_executions( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -243,7 +278,8 @@ async def create_execution( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, execution]): + has_flattened_params = any([parent, execution]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
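The hunks above (and the matching ones that follow) replace the inline `any([...])` check with a named `has_flattened_params` variable, but the contract they enforce is unchanged: each RPC method accepts either a prebuilt request object or the flattened keyword arguments, never both. A minimal usage sketch of that contract is below; the import paths follow the package layout shown in this patch, while the resource names and anonymous credentials are placeholders for illustration only, not part of the change.

# Illustrative sketch only (not part of the patch): the two calling styles the
# guard above keeps mutually exclusive. A real call needs valid credentials and
# an existing workflow; AnonymousCredentials is used here as in the unit tests.
from google.auth import credentials
from google.cloud.workflows.executions_v1beta.services.executions import ExecutionsClient
from google.cloud.workflows.executions_v1beta.types import executions

client = ExecutionsClient(credentials=credentials.AnonymousCredentials())
parent = "projects/my-project/locations/us-central1/workflows/my-workflow"  # placeholder

# Style 1: pass a fully built request object and no flattened fields.
request = executions.CreateExecutionRequest(parent=parent, execution=executions.Execution())
client.create_execution(request=request)

# Style 2: pass only the flattened fields.
client.create_execution(parent=parent, execution=executions.Execution())

# Passing both a request object and flattened fields trips the guard above and
# raises ValueError before any RPC is attempted.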
@@ -318,7 +354,8 @@ async def get_execution( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -391,7 +428,8 @@ async def cancel_execution( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." diff --git a/google/cloud/workflows/executions_v1beta/services/executions/client.py b/google/cloud/workflows/executions_v1beta/services/executions/client.py index 743398a..2b8606a 100644 --- a/google/cloud/workflows/executions_v1beta/services/executions/client.py +++ b/google/cloud/workflows/executions_v1beta/services/executions/client.py @@ -132,6 +132,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> ExecutionsTransport: + """Return the transport used by the client instance. + + Returns: + ExecutionsTransport: The transport used by the client instance. + """ + return self._transport + @staticmethod def execution_path( project: str, location: str, workflow: str, execution: str, @@ -150,6 +159,81 @@ def parse_execution_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def workflow_path(project: str, location: str, workflow: str,) -> str: + """Return a fully-qualified workflow string.""" + return "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, location=location, workflow=workflow, + ) + + @staticmethod + def parse_workflow_path(path: str) -> Dict[str, str]: + """Parse a workflow path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its
component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, @@ -185,10 +269,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: diff --git a/google/cloud/workflows/executions_v1beta/services/executions/transports/__init__.py b/google/cloud/workflows/executions_v1beta/services/executions/transports/__init__.py index ac06e02..abab840 100644 --- a/google/cloud/workflows/executions_v1beta/services/executions/transports/__init__.py +++ b/google/cloud/workflows/executions_v1beta/services/executions/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = ExecutionsGrpcTransport _transport_registry["grpc_asyncio"] = ExecutionsGrpcAsyncIOTransport - __all__ = ( "ExecutionsTransport", "ExecutionsGrpcTransport", diff --git a/google/cloud/workflows/executions_v1beta/services/executions/transports/grpc.py b/google/cloud/workflows/executions_v1beta/services/executions/transports/grpc.py index 51c7050..261029b 100644 --- a/google/cloud/workflows/executions_v1beta/services/executions/transports/grpc.py +++ b/google/cloud/workflows/executions_v1beta/services/executions/transports/grpc.py @@ -91,10 +91,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library.
Raises: @@ -103,6 +103,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -110,6 +112,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -145,7 +148,12 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -162,6 +170,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -188,7 +200,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -223,12 +235,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Return the channel from cache. return self._grpc_channel @property diff --git a/google/cloud/workflows/executions_v1beta/services/executions/transports/grpc_asyncio.py b/google/cloud/workflows/executions_v1beta/services/executions/transports/grpc_asyncio.py index da70581..7d20509 100644 --- a/google/cloud/workflows/executions_v1beta/services/executions/transports/grpc_asyncio.py +++ b/google/cloud/workflows/executions_v1beta/services/executions/transports/grpc_asyncio.py @@ -148,6 +148,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -155,6 +157,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -190,7 +193,12 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -207,6 +215,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. 
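Both transports above now pass explicit channel options when they create their gRPC channel; setting `grpc.max_send_message_length` and `grpc.max_receive_message_length` to -1 removes the default message-size caps. The sketch below shows the same option list on a plain `grpc` channel; it is an illustration only, and the target address is a placeholder rather than code from this patch.

# Illustrative sketch only: the unlimited-size options the patched transports
# forward to their channel factory, shown here on a plain insecure channel.
import grpc

unlimited_options = [
    ("grpc.max_send_message_length", -1),     # -1 removes the outbound size cap
    ("grpc.max_receive_message_length", -1),  # -1 removes the inbound size cap
]
channel = grpc.insecure_channel("localhost:50051", options=unlimited_options)  # placeholder target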
diff --git a/google/cloud/workflows/executions_v1beta/types/__init__.py b/google/cloud/workflows/executions_v1beta/types/__init__.py index 45aab25..b349e65 100644 --- a/google/cloud/workflows/executions_v1beta/types/__init__.py +++ b/google/cloud/workflows/executions_v1beta/types/__init__.py @@ -22,9 +22,9 @@ CreateExecutionRequest, GetExecutionRequest, CancelExecutionRequest, + ExecutionView, ) - __all__ = ( "Execution", "ListExecutionsRequest", @@ -32,4 +32,5 @@ "CreateExecutionRequest", "GetExecutionRequest", "CancelExecutionRequest", + "ExecutionView", ) diff --git a/google/cloud/workflows/executions_v1beta/types/executions.py b/google/cloud/workflows/executions_v1beta/types/executions.py index 9eba578..ebcdc49 100644 --- a/google/cloud/workflows/executions_v1beta/types/executions.py +++ b/google/cloud/workflows/executions_v1beta/types/executions.py @@ -176,7 +176,7 @@ class ListExecutionsResponse(proto.Message): def raw_page(self): return self - executions = proto.RepeatedField(proto.MESSAGE, number=1, message=Execution,) + executions = proto.RepeatedField(proto.MESSAGE, number=1, message="Execution",) next_page_token = proto.Field(proto.STRING, number=2) @@ -199,7 +199,7 @@ class CreateExecutionRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - execution = proto.Field(proto.MESSAGE, number=2, message=Execution,) + execution = proto.Field(proto.MESSAGE, number=2, message="Execution",) class GetExecutionRequest(proto.Message): diff --git a/google/cloud/workflows_v1beta/services/workflows/async_client.py b/google/cloud/workflows_v1beta/services/workflows/async_client.py index 3f627d4..2e77197 100644 --- a/google/cloud/workflows_v1beta/services/workflows/async_client.py +++ b/google/cloud/workflows_v1beta/services/workflows/async_client.py @@ -55,9 +55,41 @@ class WorkflowsAsyncClient: workflow_path = staticmethod(WorkflowsClient.workflow_path) parse_workflow_path = staticmethod(WorkflowsClient.parse_workflow_path) + common_billing_account_path = staticmethod( + WorkflowsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + WorkflowsClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(WorkflowsClient.common_folder_path) + parse_common_folder_path = staticmethod(WorkflowsClient.parse_common_folder_path) + + common_organization_path = staticmethod(WorkflowsClient.common_organization_path) + parse_common_organization_path = staticmethod( + WorkflowsClient.parse_common_organization_path + ) + + common_project_path = staticmethod(WorkflowsClient.common_project_path) + parse_common_project_path = staticmethod(WorkflowsClient.parse_common_project_path) + + common_location_path = staticmethod(WorkflowsClient.common_location_path) + parse_common_location_path = staticmethod( + WorkflowsClient.parse_common_location_path + ) + from_service_account_file = WorkflowsClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> WorkflowsTransport: + """Return the transport used by the client instance. + + Returns: + WorkflowsTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(WorkflowsClient).get_transport_class, type(WorkflowsClient) ) @@ -155,7 +187,8 @@ async def list_workflows( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -234,7 +267,8 @@ async def get_workflow( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -336,7 +370,8 @@ async def create_workflow( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, workflow, workflow_id]): + has_flattened_params = any([parent, workflow, workflow_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -438,7 +473,8 @@ async def delete_workflow( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -533,7 +569,8 @@ async def update_workflow( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([workflow, update_mask]): + has_flattened_params = any([workflow, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." diff --git a/google/cloud/workflows_v1beta/services/workflows/client.py b/google/cloud/workflows_v1beta/services/workflows/client.py index a2da8ef..e0c045e 100644 --- a/google/cloud/workflows_v1beta/services/workflows/client.py +++ b/google/cloud/workflows_v1beta/services/workflows/client.py @@ -136,6 +136,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> WorkflowsTransport: + """Return the transport used by the client instance. + + Returns: + WorkflowsTransport: The transport used by the client instance. 
+ """ + return self._transport + @staticmethod def workflow_path(project: str, location: str, workflow: str,) -> str: """Return a fully-qualified workflow string.""" @@ -152,6 +161,65 @@ def parse_workflow_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, @@ -187,10 +255,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
Raises: diff --git a/google/cloud/workflows_v1beta/services/workflows/transports/__init__.py b/google/cloud/workflows_v1beta/services/workflows/transports/__init__.py index 69db172..ceeb985 100644 --- a/google/cloud/workflows_v1beta/services/workflows/transports/__init__.py +++ b/google/cloud/workflows_v1beta/services/workflows/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = WorkflowsGrpcTransport _transport_registry["grpc_asyncio"] = WorkflowsGrpcAsyncIOTransport - __all__ = ( "WorkflowsTransport", "WorkflowsGrpcTransport", diff --git a/google/cloud/workflows_v1beta/services/workflows/transports/grpc.py b/google/cloud/workflows_v1beta/services/workflows/transports/grpc.py index 28eeafd..5491f27 100644 --- a/google/cloud/workflows_v1beta/services/workflows/transports/grpc.py +++ b/google/cloud/workflows_v1beta/services/workflows/transports/grpc.py @@ -93,10 +93,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -105,6 +105,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -112,6 +114,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -147,7 +150,12 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -164,9 +172,14 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] + self._operations_client = None # Run the base constructor. super().__init__( @@ -190,7 +203,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -225,12 +238,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. 
""" - # Return the channel from cache. return self._grpc_channel @property @@ -241,13 +250,11 @@ def operations_client(self) -> operations_v1.OperationsClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsClient( - self.grpc_channel - ) + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) # Return the client from cache. - return self.__dict__["operations_client"] + return self._operations_client @property def list_workflows( diff --git a/google/cloud/workflows_v1beta/services/workflows/transports/grpc_asyncio.py b/google/cloud/workflows_v1beta/services/workflows/transports/grpc_asyncio.py index 97a931a..2142311 100644 --- a/google/cloud/workflows_v1beta/services/workflows/transports/grpc_asyncio.py +++ b/google/cloud/workflows_v1beta/services/workflows/transports/grpc_asyncio.py @@ -150,6 +150,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -157,6 +159,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -192,7 +195,12 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -209,6 +217,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. @@ -222,6 +234,7 @@ def __init__( ) self._stubs = {} + self._operations_client = None @property def grpc_channel(self) -> aio.Channel: @@ -241,13 +254,13 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: client. """ # Sanity check: Only create a new client if we do not already have one. - if "operations_client" not in self.__dict__: - self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient( + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel ) # Return the client from cache. 
- return self.__dict__["operations_client"] + return self._operations_client @property def list_workflows( diff --git a/google/cloud/workflows_v1beta/types/__init__.py b/google/cloud/workflows_v1beta/types/__init__.py index 24938d4..a28c34a 100644 --- a/google/cloud/workflows_v1beta/types/__init__.py +++ b/google/cloud/workflows_v1beta/types/__init__.py @@ -26,7 +26,6 @@ OperationMetadata, ) - __all__ = ( "Workflow", "ListWorkflowsRequest", diff --git a/google/cloud/workflows_v1beta/types/workflows.py b/google/cloud/workflows_v1beta/types/workflows.py index 36f29de..d4a95f1 100644 --- a/google/cloud/workflows_v1beta/types/workflows.py +++ b/google/cloud/workflows_v1beta/types/workflows.py @@ -197,7 +197,7 @@ class ListWorkflowsResponse(proto.Message): def raw_page(self): return self - workflows = proto.RepeatedField(proto.MESSAGE, number=1, message=Workflow,) + workflows = proto.RepeatedField(proto.MESSAGE, number=1, message="Workflow",) next_page_token = proto.Field(proto.STRING, number=2) @@ -245,7 +245,7 @@ class CreateWorkflowRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - workflow = proto.Field(proto.MESSAGE, number=2, message=Workflow,) + workflow = proto.Field(proto.MESSAGE, number=2, message="Workflow",) workflow_id = proto.Field(proto.STRING, number=3) @@ -278,7 +278,7 @@ class UpdateWorkflowRequest(proto.Message): the entire workflow will be updated. """ - workflow = proto.Field(proto.MESSAGE, number=1, message=Workflow,) + workflow = proto.Field(proto.MESSAGE, number=1, message="Workflow",) update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) diff --git a/noxfile.py b/noxfile.py index a65a3e7..c893c37 100644 --- a/noxfile.py +++ b/noxfile.py @@ -28,7 +28,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/synth.metadata b/synth.metadata index ed8de73..3fa9894 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-workflows.git", - "sha": "60d393078c39eec8756c65338860e46aa641d31d" + "sha": "1a1d3df8042587954a37ed69f941a542460c1f81" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "2987612c6aacc1857ee35468e7aeb1c393460799", - "internalRef": "333583499" + "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", + "internalRef": "347055288" } }, { diff --git a/tests/unit/gapic/executions_v1beta/test_executions.py b/tests/unit/gapic/executions_v1beta/test_executions.py index 510a9f3..e81f349 100644 --- a/tests/unit/gapic/executions_v1beta/test_executions.py +++ b/tests/unit/gapic/executions_v1beta/test_executions.py @@ -94,12 +94,12 @@ def test_executions_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "workflowexecutions.googleapis.com:443" + assert client.transport._host == "workflowexecutions.googleapis.com:443" def test_executions_client_get_transport_class(): @@ -443,7 +443,7 @@ def test_list_executions( 
request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_executions), "__call__") as call: + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.ListExecutionsResponse( next_page_token="next_page_token_value", @@ -458,6 +458,7 @@ def test_list_executions( assert args[0] == executions.ListExecutionsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExecutionsPager) assert response.next_page_token == "next_page_token_value" @@ -468,19 +469,19 @@ def test_list_executions_from_dict(): @pytest.mark.asyncio -async def test_list_executions_async(transport: str = "grpc_asyncio"): +async def test_list_executions_async( + transport: str = "grpc_asyncio", request_type=executions.ListExecutionsRequest +): client = ExecutionsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = executions.ListExecutionsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_executions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( executions.ListExecutionsResponse(next_page_token="next_page_token_value",) @@ -492,7 +493,7 @@ async def test_list_executions_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == executions.ListExecutionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExecutionsAsyncPager) @@ -500,6 +501,11 @@ async def test_list_executions_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_executions_async_from_dict(): + await test_list_executions_async(request_type=dict) + + def test_list_executions_field_headers(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) @@ -509,7 +515,7 @@ def test_list_executions_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_executions), "__call__") as call: + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: call.return_value = executions.ListExecutionsResponse() client.list_executions(request) @@ -534,9 +540,7 @@ async def test_list_executions_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.list_executions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( executions.ListExecutionsResponse() ) @@ -557,7 +561,7 @@ def test_list_executions_flattened(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_executions), "__call__") as call: + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.ListExecutionsResponse() @@ -589,9 +593,7 @@ async def test_list_executions_flattened_async(): client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_executions), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.ListExecutionsResponse() @@ -626,7 +628,7 @@ def test_list_executions_pager(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_executions), "__call__") as call: + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( executions.ListExecutionsResponse( @@ -664,7 +666,7 @@ def test_list_executions_pages(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_executions), "__call__") as call: + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( executions.ListExecutionsResponse( @@ -695,9 +697,7 @@ async def test_list_executions_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_executions), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_executions), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -734,9 +734,7 @@ async def test_list_executions_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_executions), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_executions), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -776,9 +774,7 @@ def test_create_execution( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.create_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = executions.Execution( name="name_value", @@ -797,6 +793,7 @@ def test_create_execution( assert args[0] == executions.CreateExecutionRequest() # Establish that the response is the type that we expect. + assert isinstance(response, executions.Execution) assert response.name == "name_value" @@ -815,19 +812,19 @@ def test_create_execution_from_dict(): @pytest.mark.asyncio -async def test_create_execution_async(transport: str = "grpc_asyncio"): +async def test_create_execution_async( + transport: str = "grpc_asyncio", request_type=executions.CreateExecutionRequest +): client = ExecutionsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = executions.CreateExecutionRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( executions.Execution( @@ -845,7 +842,7 @@ async def test_create_execution_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == executions.CreateExecutionRequest() # Establish that the response is the type that we expect. assert isinstance(response, executions.Execution) @@ -861,6 +858,11 @@ async def test_create_execution_async(transport: str = "grpc_asyncio"): assert response.workflow_revision_id == "workflow_revision_id_value" +@pytest.mark.asyncio +async def test_create_execution_async_from_dict(): + await test_create_execution_async(request_type=dict) + + def test_create_execution_field_headers(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) @@ -870,9 +872,7 @@ def test_create_execution_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.create_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: call.return_value = executions.Execution() client.create_execution(request) @@ -897,9 +897,7 @@ async def test_create_execution_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( executions.Execution() ) @@ -920,9 +918,7 @@ def test_create_execution_flattened(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.create_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = executions.Execution() @@ -960,9 +956,7 @@ async def test_create_execution_flattened_async(): client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.Execution() @@ -1011,7 +1005,7 @@ def test_get_execution( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_execution), "__call__") as call: + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.Execution( name="name_value", @@ -1030,6 +1024,7 @@ def test_get_execution( assert args[0] == executions.GetExecutionRequest() # Establish that the response is the type that we expect. + assert isinstance(response, executions.Execution) assert response.name == "name_value" @@ -1048,19 +1043,19 @@ def test_get_execution_from_dict(): @pytest.mark.asyncio -async def test_get_execution_async(transport: str = "grpc_asyncio"): +async def test_get_execution_async( + transport: str = "grpc_asyncio", request_type=executions.GetExecutionRequest +): client = ExecutionsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = executions.GetExecutionRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( executions.Execution( @@ -1078,7 +1073,7 @@ async def test_get_execution_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == executions.GetExecutionRequest() # Establish that the response is the type that we expect. assert isinstance(response, executions.Execution) @@ -1094,6 +1089,11 @@ async def test_get_execution_async(transport: str = "grpc_asyncio"): assert response.workflow_revision_id == "workflow_revision_id_value" +@pytest.mark.asyncio +async def test_get_execution_async_from_dict(): + await test_get_execution_async(request_type=dict) + + def test_get_execution_field_headers(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) @@ -1103,7 +1103,7 @@ def test_get_execution_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_execution), "__call__") as call: + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: call.return_value = executions.Execution() client.get_execution(request) @@ -1128,9 +1128,7 @@ async def test_get_execution_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.get_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( executions.Execution() ) @@ -1151,7 +1149,7 @@ def test_get_execution_flattened(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_execution), "__call__") as call: + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.Execution() @@ -1183,9 +1181,7 @@ async def test_get_execution_flattened_async(): client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.Execution() @@ -1228,9 +1224,7 @@ def test_cancel_execution( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.cancel_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.Execution( name="name_value", @@ -1249,6 +1243,7 @@ def test_cancel_execution( assert args[0] == executions.CancelExecutionRequest() # Establish that the response is the type that we expect. + assert isinstance(response, executions.Execution) assert response.name == "name_value" @@ -1267,19 +1262,19 @@ def test_cancel_execution_from_dict(): @pytest.mark.asyncio -async def test_cancel_execution_async(transport: str = "grpc_asyncio"): +async def test_cancel_execution_async( + transport: str = "grpc_asyncio", request_type=executions.CancelExecutionRequest +): client = ExecutionsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = executions.CancelExecutionRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( executions.Execution( @@ -1297,7 +1292,7 @@ async def test_cancel_execution_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == executions.CancelExecutionRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, executions.Execution) @@ -1313,6 +1308,11 @@ async def test_cancel_execution_async(transport: str = "grpc_asyncio"): assert response.workflow_revision_id == "workflow_revision_id_value" +@pytest.mark.asyncio +async def test_cancel_execution_async_from_dict(): + await test_cancel_execution_async(request_type=dict) + + def test_cancel_execution_field_headers(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) @@ -1322,9 +1322,7 @@ def test_cancel_execution_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.cancel_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: call.return_value = executions.Execution() client.cancel_execution(request) @@ -1349,9 +1347,7 @@ async def test_cancel_execution_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( executions.Execution() ) @@ -1372,9 +1368,7 @@ def test_cancel_execution_flattened(): client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.cancel_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.Execution() @@ -1406,9 +1400,7 @@ async def test_cancel_execution_flattened_async(): client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_execution), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = executions.Execution() @@ -1475,7 +1467,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = ExecutionsClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -1508,7 +1500,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.ExecutionsGrpcTransport,) + assert isinstance(client.transport, transports.ExecutionsGrpcTransport,) def test_executions_base_transport_error(): @@ -1605,7 +1597,7 @@ def test_executions_host_no_port(): api_endpoint="workflowexecutions.googleapis.com" ), ) - assert client._transport._host == "workflowexecutions.googleapis.com:443" + assert client.transport._host == "workflowexecutions.googleapis.com:443" def test_executions_host_with_port(): @@ -1615,7 +1607,7 @@ def test_executions_host_with_port(): api_endpoint="workflowexecutions.googleapis.com:8000" ), ) - assert client._transport._host == "workflowexecutions.googleapis.com:8000" + assert client.transport._host == "workflowexecutions.googleapis.com:8000" def test_executions_grpc_transport_channel(): @@ -1627,6 +1619,7 @@ def test_executions_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_executions_grpc_asyncio_transport_channel(): @@ -1638,6 +1631,7 @@ def test_executions_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( @@ -1678,8 +1672,13 @@ def test_executions_transport_channel_mtls_with_client_cert_source(transport_cla scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( @@ -1715,6 +1714,10 @@ def test_executions_transport_channel_mtls_with_adc(transport_class): scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel @@ -1746,6 +1749,132 @@ def test_parse_execution_path(): assert expected == actual +def test_workflow_path(): + project = "winkle" + location = "nautilus" + workflow = "scallop" + + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, location=location, workflow=workflow, + ) + actual = ExecutionsClient.workflow_path(project, location, workflow) + assert expected == actual + + +def test_parse_workflow_path(): + expected = { + "project": "abalone", + "location": "squid", + "workflow": "clam", + } + path = ExecutionsClient.workflow_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_workflow_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ExecutionsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ExecutionsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ExecutionsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + + expected = "folders/{folder}".format(folder=folder,) + actual = ExecutionsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ExecutionsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + + expected = "organizations/{organization}".format(organization=organization,) + actual = ExecutionsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ExecutionsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + + expected = "projects/{project}".format(project=project,) + actual = ExecutionsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ExecutionsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = ExecutionsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ExecutionsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() diff --git a/tests/unit/gapic/workflows_v1beta/test_workflows.py b/tests/unit/gapic/workflows_v1beta/test_workflows.py index ce6739c..5c46620 100644 --- a/tests/unit/gapic/workflows_v1beta/test_workflows.py +++ b/tests/unit/gapic/workflows_v1beta/test_workflows.py @@ -93,12 +93,12 @@ def test_workflows_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "workflows.googleapis.com:443" + assert client.transport._host == "workflows.googleapis.com:443" def test_workflows_client_get_transport_class(): @@ -440,7 +440,7 @@ def test_list_workflows( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_workflows), "__call__") as call: + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = workflows.ListWorkflowsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], @@ -455,6 +455,7 @@ def test_list_workflows( assert args[0] == workflows.ListWorkflowsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkflowsPager) assert response.next_page_token == "next_page_token_value" @@ -467,19 +468,19 @@ def test_list_workflows_from_dict(): @pytest.mark.asyncio -async def test_list_workflows_async(transport: str = "grpc_asyncio"): +async def test_list_workflows_async( + transport: str = "grpc_asyncio", request_type=workflows.ListWorkflowsRequest +): client = WorkflowsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = workflows.ListWorkflowsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_workflows), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflows.ListWorkflowsResponse( @@ -494,7 +495,7 @@ async def test_list_workflows_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == workflows.ListWorkflowsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWorkflowsAsyncPager) @@ -504,6 +505,11 @@ async def test_list_workflows_async(transport: str = "grpc_asyncio"): assert response.unreachable == ["unreachable_value"] +@pytest.mark.asyncio +async def test_list_workflows_async_from_dict(): + await test_list_workflows_async(request_type=dict) + + def test_list_workflows_field_headers(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) @@ -513,7 +519,7 @@ def test_list_workflows_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_workflows), "__call__") as call: + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: call.return_value = workflows.ListWorkflowsResponse() client.list_workflows(request) @@ -538,9 +544,7 @@ async def test_list_workflows_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_workflows), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflows.ListWorkflowsResponse() ) @@ -561,7 +565,7 @@ def test_list_workflows_flattened(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_workflows), "__call__") as call: + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = workflows.ListWorkflowsResponse() @@ -593,9 +597,7 @@ async def test_list_workflows_flattened_async(): client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_workflows), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = workflows.ListWorkflowsResponse() @@ -630,7 +632,7 @@ def test_list_workflows_pager(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_workflows), "__call__") as call: + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( workflows.ListWorkflowsResponse( @@ -668,7 +670,7 @@ def test_list_workflows_pages(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_workflows), "__call__") as call: + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( workflows.ListWorkflowsResponse( @@ -699,9 +701,7 @@ async def test_list_workflows_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_workflows), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_workflows), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -738,9 +738,7 @@ async def test_list_workflows_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_workflows), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_workflows), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -780,7 +778,7 @@ def test_get_workflow( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = workflows.Workflow( name="name_value", @@ -800,6 +798,7 @@ def test_get_workflow( assert args[0] == workflows.GetWorkflowRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, workflows.Workflow) assert response.name == "name_value" @@ -818,19 +817,19 @@ def test_get_workflow_from_dict(): @pytest.mark.asyncio -async def test_get_workflow_async(transport: str = "grpc_asyncio"): +async def test_get_workflow_async( + transport: str = "grpc_asyncio", request_type=workflows.GetWorkflowRequest +): client = WorkflowsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = workflows.GetWorkflowRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflows.Workflow( @@ -848,7 +847,7 @@ async def test_get_workflow_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == workflows.GetWorkflowRequest() # Establish that the response is the type that we expect. assert isinstance(response, workflows.Workflow) @@ -864,6 +863,11 @@ async def test_get_workflow_async(transport: str = "grpc_asyncio"): assert response.service_account == "service_account_value" +@pytest.mark.asyncio +async def test_get_workflow_async_from_dict(): + await test_get_workflow_async(request_type=dict) + + def test_get_workflow_field_headers(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) @@ -873,7 +877,7 @@ def test_get_workflow_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: call.return_value = workflows.Workflow() client.get_workflow(request) @@ -898,9 +902,7 @@ async def test_get_workflow_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflows.Workflow()) await client.get_workflow(request) @@ -919,7 +921,7 @@ def test_get_workflow_flattened(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = workflows.Workflow() @@ -951,9 +953,7 @@ async def test_get_workflow_flattened_async(): client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = workflows.Workflow() @@ -994,7 +994,7 @@ def test_create_workflow( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1015,19 +1015,19 @@ def test_create_workflow_from_dict(): @pytest.mark.asyncio -async def test_create_workflow_async(transport: str = "grpc_asyncio"): +async def test_create_workflow_async( + transport: str = "grpc_asyncio", request_type=workflows.CreateWorkflowRequest +): client = WorkflowsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = workflows.CreateWorkflowRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1039,12 +1039,17 @@ async def test_create_workflow_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == workflows.CreateWorkflowRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_create_workflow_async_from_dict(): + await test_create_workflow_async(request_type=dict) + + def test_create_workflow_field_headers(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) @@ -1054,7 +1059,7 @@ def test_create_workflow_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_workflow(request) @@ -1079,9 +1084,7 @@ async def test_create_workflow_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1102,7 +1105,7 @@ def test_create_workflow_flattened(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -1145,9 +1148,7 @@ async def test_create_workflow_flattened_async(): client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1201,7 +1202,7 @@ def test_delete_workflow( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1222,19 +1223,19 @@ def test_delete_workflow_from_dict(): @pytest.mark.asyncio -async def test_delete_workflow_async(transport: str = "grpc_asyncio"): +async def test_delete_workflow_async( + transport: str = "grpc_asyncio", request_type=workflows.DeleteWorkflowRequest +): client = WorkflowsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = workflows.DeleteWorkflowRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1246,12 +1247,17 @@ async def test_delete_workflow_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == workflows.DeleteWorkflowRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_delete_workflow_async_from_dict(): + await test_delete_workflow_async(request_type=dict) + + def test_delete_workflow_field_headers(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) @@ -1261,7 +1267,7 @@ def test_delete_workflow_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.delete_workflow(request) @@ -1286,9 +1292,7 @@ async def test_delete_workflow_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.delete_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1309,7 +1313,7 @@ def test_delete_workflow_flattened(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1341,9 +1345,7 @@ async def test_delete_workflow_flattened_async(): client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1386,7 +1388,7 @@ def test_update_workflow( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1407,19 +1409,19 @@ def test_update_workflow_from_dict(): @pytest.mark.asyncio -async def test_update_workflow_async(transport: str = "grpc_asyncio"): +async def test_update_workflow_async( + transport: str = "grpc_asyncio", request_type=workflows.UpdateWorkflowRequest +): client = WorkflowsAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = workflows.UpdateWorkflowRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1431,12 +1433,17 @@ async def test_update_workflow_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == workflows.UpdateWorkflowRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_update_workflow_async_from_dict(): + await test_update_workflow_async(request_type=dict) + + def test_update_workflow_field_headers(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) @@ -1446,7 +1453,7 @@ def test_update_workflow_field_headers(): request.workflow.name = "workflow.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.update_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.update_workflow(request) @@ -1473,9 +1480,7 @@ async def test_update_workflow_field_headers_async(): request.workflow.name = "workflow.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) @@ -1498,7 +1503,7 @@ def test_update_workflow_flattened(): client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_workflow), "__call__") as call: + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1537,9 +1542,7 @@ async def test_update_workflow_flattened_async(): client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_workflow), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1613,7 +1616,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = WorkflowsClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -1646,7 +1649,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.WorkflowsGrpcTransport,) + assert isinstance(client.transport, transports.WorkflowsGrpcTransport,) def test_workflows_base_transport_error(): @@ -1749,7 +1752,7 @@ def test_workflows_host_no_port(): api_endpoint="workflows.googleapis.com" ), ) - assert client._transport._host == "workflows.googleapis.com:443" + assert client.transport._host == "workflows.googleapis.com:443" def test_workflows_host_with_port(): @@ -1759,7 +1762,7 @@ def test_workflows_host_with_port(): api_endpoint="workflows.googleapis.com:8000" ), ) - assert client._transport._host == "workflows.googleapis.com:8000" + assert client.transport._host == "workflows.googleapis.com:8000" def test_workflows_grpc_transport_channel(): @@ -1771,6 +1774,7 @@ def test_workflows_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_workflows_grpc_asyncio_transport_channel(): @@ -1782,6 +1786,7 @@ def test_workflows_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( @@ -1822,8 +1827,13 @@ def test_workflows_transport_channel_mtls_with_client_cert_source(transport_clas scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( @@ -1859,6 +1869,10 @@ def test_workflows_transport_channel_mtls_with_adc(transport_class): scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel @@ -1867,7 +1881,7 @@ def test_workflows_grpc_lro_client(): client = WorkflowsClient( credentials=credentials.AnonymousCredentials(), transport="grpc", ) - transport = client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsClient,) @@ -1880,7 +1894,7 @@ def test_workflows_grpc_lro_async_client(): client = WorkflowsAsyncClient( credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", ) - transport = client._client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) @@ -1914,6 +1928,107 @@ def test_parse_workflow_path(): assert expected == actual +def test_common_billing_account_path(): + billing_account = "cuttlefish" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = WorkflowsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = WorkflowsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = WorkflowsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + + expected = "folders/{folder}".format(folder=folder,) + actual = WorkflowsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = WorkflowsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + + expected = "organizations/{organization}".format(organization=organization,) + actual = WorkflowsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = WorkflowsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + + expected = "projects/{project}".format(project=project,) + actual = WorkflowsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = WorkflowsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = WorkflowsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = WorkflowsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowsClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo()