From a843aaed7e295f951650b81ce3da5cbece4ebab7 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 31 Mar 2021 09:10:03 -0600 Subject: [PATCH] feat: add v1 (#36) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #30 🦕 --- docs/executions_v1/executions.rst | 11 + docs/executions_v1/services.rst | 6 + docs/executions_v1/types.rst | 7 + docs/index.rst | 16 +- docs/workflows_v1/services.rst | 6 + docs/workflows_v1/types.rst | 7 + docs/workflows_v1/workflows.rst | 11 + google/cloud/workflows/__init__.py | 20 +- google/cloud/workflows/executions/__init__.py | 28 +- .../cloud/workflows/executions_v1/__init__.py | 37 + google/cloud/workflows/executions_v1/py.typed | 2 + .../executions_v1/services/__init__.py | 16 + .../services/executions/__init__.py | 24 + .../services/executions/async_client.py | 507 ++++ .../services/executions/client.py | 705 ++++++ .../services/executions/pagers.py | 157 ++ .../executions/transports/__init__.py | 35 + .../services/executions/transports/base.py | 161 ++ .../services/executions/transports/grpc.py | 337 +++ .../executions/transports/grpc_asyncio.py | 341 +++ .../workflows/executions_v1/types/__init__.py | 36 + .../executions_v1/types/executions.py | 239 ++ google/cloud/workflows_v1/__init__.py | 39 + google/cloud/workflows_v1/py.typed | 2 + .../cloud/workflows_v1/services/__init__.py | 16 + .../services/workflows/__init__.py | 24 + .../services/workflows/async_client.py | 660 +++++ .../workflows_v1/services/workflows/client.py | 843 +++++++ .../workflows_v1/services/workflows/pagers.py | 157 ++ .../services/workflows/transports/__init__.py | 35 + .../services/workflows/transports/base.py | 180 ++ .../services/workflows/transports/grpc.py | 385 +++ .../workflows/transports/grpc_asyncio.py | 393 +++ google/cloud/workflows_v1/types/__init__.py | 38 + google/cloud/workflows_v1/types/workflows.py | 314 +++ .../services/workflows/async_client.py | 1 - .../services/workflows/client.py | 1 - synth.metadata | 30 +- synth.py | 73 +- tests/unit/gapic/executions_v1/__init__.py | 16 + .../gapic/executions_v1/test_executions.py | 2007 +++++++++++++++ tests/unit/gapic/workflows_v1/__init__.py | 16 + .../unit/gapic/workflows_v1/test_workflows.py | 2179 +++++++++++++++++ 43 files changed, 10043 insertions(+), 75 deletions(-) create mode 100644 docs/executions_v1/executions.rst create mode 100644 docs/executions_v1/services.rst create mode 100644 docs/executions_v1/types.rst create mode 100644 docs/workflows_v1/services.rst create mode 100644 docs/workflows_v1/types.rst create mode 100644 docs/workflows_v1/workflows.rst create mode 100644 google/cloud/workflows/executions_v1/__init__.py create mode 100644 google/cloud/workflows/executions_v1/py.typed create mode 100644 google/cloud/workflows/executions_v1/services/__init__.py create mode 100644 google/cloud/workflows/executions_v1/services/executions/__init__.py create mode 100644
google/cloud/workflows/executions_v1/services/executions/async_client.py create mode 100644 google/cloud/workflows/executions_v1/services/executions/client.py create mode 100644 google/cloud/workflows/executions_v1/services/executions/pagers.py create mode 100644 google/cloud/workflows/executions_v1/services/executions/transports/__init__.py create mode 100644 google/cloud/workflows/executions_v1/services/executions/transports/base.py create mode 100644 google/cloud/workflows/executions_v1/services/executions/transports/grpc.py create mode 100644 google/cloud/workflows/executions_v1/services/executions/transports/grpc_asyncio.py create mode 100644 google/cloud/workflows/executions_v1/types/__init__.py create mode 100644 google/cloud/workflows/executions_v1/types/executions.py create mode 100644 google/cloud/workflows_v1/__init__.py create mode 100644 google/cloud/workflows_v1/py.typed create mode 100644 google/cloud/workflows_v1/services/__init__.py create mode 100644 google/cloud/workflows_v1/services/workflows/__init__.py create mode 100644 google/cloud/workflows_v1/services/workflows/async_client.py create mode 100644 google/cloud/workflows_v1/services/workflows/client.py create mode 100644 google/cloud/workflows_v1/services/workflows/pagers.py create mode 100644 google/cloud/workflows_v1/services/workflows/transports/__init__.py create mode 100644 google/cloud/workflows_v1/services/workflows/transports/base.py create mode 100644 google/cloud/workflows_v1/services/workflows/transports/grpc.py create mode 100644 google/cloud/workflows_v1/services/workflows/transports/grpc_asyncio.py create mode 100644 google/cloud/workflows_v1/types/__init__.py create mode 100644 google/cloud/workflows_v1/types/workflows.py create mode 100644 tests/unit/gapic/executions_v1/__init__.py create mode 100644 tests/unit/gapic/executions_v1/test_executions.py create mode 100644 tests/unit/gapic/workflows_v1/__init__.py create mode 100644 tests/unit/gapic/workflows_v1/test_workflows.py diff --git a/docs/executions_v1/executions.rst b/docs/executions_v1/executions.rst new file mode 100644 index 0000000..cf374ec --- /dev/null +++ b/docs/executions_v1/executions.rst @@ -0,0 +1,11 @@ +Executions +---------------------------- + +.. automodule:: google.cloud.workflows.executions_v1.services.executions + :members: + :inherited-members: + + +.. automodule:: google.cloud.workflows.executions_v1.services.executions.pagers + :members: + :inherited-members: diff --git a/docs/executions_v1/services.rst b/docs/executions_v1/services.rst new file mode 100644 index 0000000..bc1bbaa --- /dev/null +++ b/docs/executions_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Workflows Executions v1 API +===================================================== +.. toctree:: + :maxdepth: 2 + + executions diff --git a/docs/executions_v1/types.rst b/docs/executions_v1/types.rst new file mode 100644 index 0000000..9210a22 --- /dev/null +++ b/docs/executions_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Workflows Executions v1 API +================================================== + +.. automodule:: google.cloud.workflows.executions_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/index.rst b/docs/index.rst index e5432e4..36c5a5f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,7 +2,21 @@ .. include:: multiprocessing.rst -API Reference +This package includes clients for multiple versions of the Workflows API. By default, you will get ``v1``, the latest version. + +v1 +--- +..
toctree:: + :maxdepth: 2 + + workflows_v1/services + executions_v1/services + workflows_v1/types + executions_v1/types + +The previous beta release, spelled ``v1beta``, is provided to continue to support code previously written against it. In order to use it, you will want to import from it, e.g., ``google.cloud.workflows_v1beta`` in lieu of ``google.cloud.workflows`` (or the equivalent ``google.cloud.workflows_v1``). + +v1beta ------------- .. toctree:: :maxdepth: 2 diff --git a/docs/workflows_v1/services.rst b/docs/workflows_v1/services.rst new file mode 100644 index 0000000..460e0a3 --- /dev/null +++ b/docs/workflows_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Workflows v1 API +========================================== +.. toctree:: + :maxdepth: 2 + + workflows diff --git a/docs/workflows_v1/types.rst b/docs/workflows_v1/types.rst new file mode 100644 index 0000000..31d6c6e --- /dev/null +++ b/docs/workflows_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Workflows v1 API +======================================= + +.. automodule:: google.cloud.workflows_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/workflows_v1/workflows.rst b/docs/workflows_v1/workflows.rst new file mode 100644 index 0000000..66dfaa6 --- /dev/null +++ b/docs/workflows_v1/workflows.rst @@ -0,0 +1,11 @@ +Workflows +--------------------------- + +.. automodule:: google.cloud.workflows_v1.services.workflows + :members: + :inherited-members: + + +.. automodule:: google.cloud.workflows_v1.services.workflows.pagers + :members: + :inherited-members: diff --git a/google/cloud/workflows/__init__.py b/google/cloud/workflows/__init__.py index 71ec9f6..3929d77 100644 --- a/google/cloud/workflows/__init__.py +++ b/google/cloud/workflows/__init__.py @@ -15,18 +15,18 @@ # limitations under the License.
# -from google.cloud.workflows_v1beta.services.workflows.async_client import ( +from google.cloud.workflows_v1.services.workflows.async_client import ( WorkflowsAsyncClient, ) -from google.cloud.workflows_v1beta.services.workflows.client import WorkflowsClient -from google.cloud.workflows_v1beta.types.workflows import CreateWorkflowRequest -from google.cloud.workflows_v1beta.types.workflows import DeleteWorkflowRequest -from google.cloud.workflows_v1beta.types.workflows import GetWorkflowRequest -from google.cloud.workflows_v1beta.types.workflows import ListWorkflowsRequest -from google.cloud.workflows_v1beta.types.workflows import ListWorkflowsResponse -from google.cloud.workflows_v1beta.types.workflows import OperationMetadata -from google.cloud.workflows_v1beta.types.workflows import UpdateWorkflowRequest -from google.cloud.workflows_v1beta.types.workflows import Workflow +from google.cloud.workflows_v1.services.workflows.client import WorkflowsClient +from google.cloud.workflows_v1.types.workflows import CreateWorkflowRequest +from google.cloud.workflows_v1.types.workflows import DeleteWorkflowRequest +from google.cloud.workflows_v1.types.workflows import GetWorkflowRequest +from google.cloud.workflows_v1.types.workflows import ListWorkflowsRequest +from google.cloud.workflows_v1.types.workflows import ListWorkflowsResponse +from google.cloud.workflows_v1.types.workflows import OperationMetadata +from google.cloud.workflows_v1.types.workflows import UpdateWorkflowRequest +from google.cloud.workflows_v1.types.workflows import Workflow __all__ = ( "CreateWorkflowRequest", diff --git a/google/cloud/workflows/executions/__init__.py b/google/cloud/workflows/executions/__init__.py index c695dea..4d74157 100644 --- a/google/cloud/workflows/executions/__init__.py +++ b/google/cloud/workflows/executions/__init__.py @@ -15,29 +15,19 @@ # limitations under the License. 
# -from google.cloud.workflows.executions_v1beta.services.executions.async_client import ( +from google.cloud.workflows.executions_v1.services.executions.async_client import ( ExecutionsAsyncClient, ) -from google.cloud.workflows.executions_v1beta.services.executions.client import ( +from google.cloud.workflows.executions_v1.services.executions.client import ( ExecutionsClient, ) -from google.cloud.workflows.executions_v1beta.types.executions import ( - CancelExecutionRequest, -) -from google.cloud.workflows.executions_v1beta.types.executions import ( - CreateExecutionRequest, -) -from google.cloud.workflows.executions_v1beta.types.executions import Execution -from google.cloud.workflows.executions_v1beta.types.executions import ExecutionView -from google.cloud.workflows.executions_v1beta.types.executions import ( - GetExecutionRequest, -) -from google.cloud.workflows.executions_v1beta.types.executions import ( - ListExecutionsRequest, -) -from google.cloud.workflows.executions_v1beta.types.executions import ( - ListExecutionsResponse, -) +from google.cloud.workflows.executions_v1.types.executions import CancelExecutionRequest +from google.cloud.workflows.executions_v1.types.executions import CreateExecutionRequest +from google.cloud.workflows.executions_v1.types.executions import Execution +from google.cloud.workflows.executions_v1.types.executions import ExecutionView +from google.cloud.workflows.executions_v1.types.executions import GetExecutionRequest +from google.cloud.workflows.executions_v1.types.executions import ListExecutionsRequest +from google.cloud.workflows.executions_v1.types.executions import ListExecutionsResponse __all__ = ( "CancelExecutionRequest", diff --git a/google/cloud/workflows/executions_v1/__init__.py b/google/cloud/workflows/executions_v1/__init__.py new file mode 100644 index 0000000..65343af --- /dev/null +++ b/google/cloud/workflows/executions_v1/__init__.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.executions import ExecutionsClient +from .types.executions import CancelExecutionRequest +from .types.executions import CreateExecutionRequest +from .types.executions import Execution +from .types.executions import ExecutionView +from .types.executions import GetExecutionRequest +from .types.executions import ListExecutionsRequest +from .types.executions import ListExecutionsResponse + + +__all__ = ( + "CancelExecutionRequest", + "CreateExecutionRequest", + "Execution", + "ExecutionView", + "GetExecutionRequest", + "ListExecutionsRequest", + "ListExecutionsResponse", + "ExecutionsClient", +) diff --git a/google/cloud/workflows/executions_v1/py.typed b/google/cloud/workflows/executions_v1/py.typed new file mode 100644 index 0000000..ff4d7c1 --- /dev/null +++ b/google/cloud/workflows/executions_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-workflows-executions package uses inline types. 
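With the top-level ``google.cloud.workflows`` and ``google.cloud.workflows.executions`` packages re-exporting the ``v1`` clients as above, existing import paths pick up the new default version transparently. A minimal usage sketch of that surface (the project, location, and workflow names are placeholders, and Application Default Credentials are assumed):

```python
# Hedged sketch: resource names below are placeholders, and Application
# Default Credentials are assumed to be configured in the environment.
from google.cloud.workflows.executions import ExecutionsClient

client = ExecutionsClient()

# Helper producing "projects/{project}/locations/{location}/workflows/{workflow}".
parent = client.workflow_path("my-project", "us-central1", "my-workflow")

# list_executions returns a pager; iterating it yields Execution messages
# and fetches additional pages behind the scenes.
for execution in client.list_executions(parent=parent):
    print(execution.name)
```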
diff --git a/google/cloud/workflows/executions_v1/services/__init__.py b/google/cloud/workflows/executions_v1/services/__init__.py new file mode 100644 index 0000000..42ffdf2 --- /dev/null +++ b/google/cloud/workflows/executions_v1/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/google/cloud/workflows/executions_v1/services/executions/__init__.py b/google/cloud/workflows/executions_v1/services/executions/__init__.py new file mode 100644 index 0000000..2a778e1 --- /dev/null +++ b/google/cloud/workflows/executions_v1/services/executions/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import ExecutionsClient +from .async_client import ExecutionsAsyncClient + +__all__ = ( + "ExecutionsClient", + "ExecutionsAsyncClient", +) diff --git a/google/cloud/workflows/executions_v1/services/executions/async_client.py b/google/cloud/workflows/executions_v1/services/executions/async_client.py new file mode 100644 index 0000000..40e5b73 --- /dev/null +++ b/google/cloud/workflows/executions_v1/services/executions/async_client.py @@ -0,0 +1,507 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.workflows.executions_v1.services.executions import pagers +from google.cloud.workflows.executions_v1.types import executions +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import ExecutionsTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ExecutionsGrpcAsyncIOTransport +from .client import ExecutionsClient + + +class ExecutionsAsyncClient: + """Executions is used to start and manage running instances of + [Workflows][google.cloud.workflows.v1.Workflow] called executions. + """ + + _client: ExecutionsClient + + DEFAULT_ENDPOINT = ExecutionsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ExecutionsClient.DEFAULT_MTLS_ENDPOINT + + execution_path = staticmethod(ExecutionsClient.execution_path) + parse_execution_path = staticmethod(ExecutionsClient.parse_execution_path) + workflow_path = staticmethod(ExecutionsClient.workflow_path) + parse_workflow_path = staticmethod(ExecutionsClient.parse_workflow_path) + + common_billing_account_path = staticmethod( + ExecutionsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ExecutionsClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(ExecutionsClient.common_folder_path) + parse_common_folder_path = staticmethod(ExecutionsClient.parse_common_folder_path) + + common_organization_path = staticmethod(ExecutionsClient.common_organization_path) + parse_common_organization_path = staticmethod( + ExecutionsClient.parse_common_organization_path + ) + + common_project_path = staticmethod(ExecutionsClient.common_project_path) + parse_common_project_path = staticmethod(ExecutionsClient.parse_common_project_path) + + common_location_path = staticmethod(ExecutionsClient.common_location_path) + parse_common_location_path = staticmethod( + ExecutionsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExecutionsAsyncClient: The constructed client. + """ + return ExecutionsClient.from_service_account_info.__func__(ExecutionsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExecutionsAsyncClient: The constructed client. 
+ """ + return ExecutionsClient.from_service_account_file.__func__(ExecutionsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ExecutionsTransport: + """Return the transport used by the client instance. + + Returns: + ExecutionsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ExecutionsClient).get_transport_class, type(ExecutionsClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, ExecutionsTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the executions client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ExecutionsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = ExecutionsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_executions( + self, + request: executions.ListExecutionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExecutionsAsyncPager: + r"""Returns a list of executions which belong to the + workflow with the given name. The method returns + executions of all workflow revisions. Returned + executions are ordered by their start time (newest + first). + + Args: + request (:class:`google.cloud.workflows.executions_v1.types.ListExecutionsRequest`): + The request object. Request for the + [ListExecutions][] + method. + parent (:class:`str`): + Required. Name of the workflow for + which the executions should be listed. + Format: + projects/{project}/locations/{location}/workflows/{workflow} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows.executions_v1.services.executions.pagers.ListExecutionsAsyncPager: + Response for the + [ListExecutions][google.cloud.workflows.executions.v1.Executions.ListExecutions] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = executions.ListExecutionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_executions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExecutionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_execution( + self, + request: executions.CreateExecutionRequest = None, + *, + parent: str = None, + execution: executions.Execution = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> executions.Execution: + r"""Creates a new execution using the latest revision of + the given workflow. + + Args: + request (:class:`google.cloud.workflows.executions_v1.types.CreateExecutionRequest`): + The request object. Request for the + [CreateExecution][google.cloud.workflows.executions.v1.Executions.CreateExecution] + method. + parent (:class:`str`): + Required. Name of the workflow for + which an execution should be created. + Format: + projects/{project}/locations/{location}/workflows/{workflow} + The latest revision of the workflow will + be used. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + execution (:class:`google.cloud.workflows.executions_v1.types.Execution`): + Required. Execution to be created. + This corresponds to the ``execution`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.workflows.executions_v1.types.Execution: + A running instance of a + [Workflow](/workflows/docs/reference/rest/v1/projects.locations.workflows). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, execution]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = executions.CreateExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if execution is not None: + request.execution = execution + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_execution, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_execution( + self, + request: executions.GetExecutionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> executions.Execution: + r"""Returns an execution of the given name. + + Args: + request (:class:`google.cloud.workflows.executions_v1.types.GetExecutionRequest`): + The request object. Request for the + [GetExecution][google.cloud.workflows.executions.v1.Executions.GetExecution] + method. + name (:class:`str`): + Required. Name of the execution to be + retrieved. Format: + projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows.executions_v1.types.Execution: + A running instance of a + [Workflow](/workflows/docs/reference/rest/v1/projects.locations.workflows). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = executions.GetExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_execution, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_execution( + self, + request: executions.CancelExecutionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> executions.Execution: + r"""Cancels an execution of the given name. + + Args: + request (:class:`google.cloud.workflows.executions_v1.types.CancelExecutionRequest`): + The request object. Request for the + [CancelExecution][google.cloud.workflows.executions.v1.Executions.CancelExecution] + method. + name (:class:`str`): + Required. Name of the execution to be + cancelled. Format: + projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows.executions_v1.types.Execution: + A running instance of a + [Workflow](/workflows/docs/reference/rest/v1/projects.locations.workflows). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = executions.CancelExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_execution, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-workflows",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("ExecutionsAsyncClient",) diff --git a/google/cloud/workflows/executions_v1/services/executions/client.py b/google/cloud/workflows/executions_v1/services/executions/client.py new file mode 100644 index 0000000..00bc655 --- /dev/null +++ b/google/cloud/workflows/executions_v1/services/executions/client.py @@ -0,0 +1,705 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.workflows.executions_v1.services.executions import pagers +from google.cloud.workflows.executions_v1.types import executions +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import ExecutionsTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ExecutionsGrpcTransport +from .transports.grpc_asyncio import ExecutionsGrpcAsyncIOTransport + + +class ExecutionsClientMeta(type): + """Metaclass for the Executions client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ExecutionsTransport]] + _transport_registry["grpc"] = ExecutionsGrpcTransport + _transport_registry["grpc_asyncio"] = ExecutionsGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[ExecutionsTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary).
+ return next(iter(cls._transport_registry.values())) + + +class ExecutionsClient(metaclass=ExecutionsClientMeta): + """Executions is used to start and manage running instances of + [Workflows][google.cloud.workflows.v1.Workflow] called executions. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "workflowexecutions.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExecutionsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ExecutionsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ExecutionsTransport: + """Return the transport used by the client instance. + + Returns: + ExecutionsTransport: The transport used by the client instance.
+ """ + return self._transport + + @staticmethod + def execution_path( + project: str, location: str, workflow: str, execution: str, + ) -> str: + """Return a fully-qualified execution string.""" + return "projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution}".format( + project=project, location=location, workflow=workflow, execution=execution, + ) + + @staticmethod + def parse_execution_path(path: str) -> Dict[str, str]: + """Parse a execution path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)/executions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def workflow_path(project: str, location: str, workflow: str,) -> str: + """Return a fully-qualified workflow string.""" + return "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, location=location, workflow=workflow, + ) + + @staticmethod + def parse_workflow_path(path: str) -> Dict[str, str]: + """Parse a workflow path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, ExecutionsTransport, None] = None, + client_options: 
Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the executions client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ExecutionsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ if isinstance(transport, ExecutionsTransport): + # transport is a ExecutionsTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_executions( + self, + request: executions.ListExecutionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExecutionsPager: + r"""Returns a list of executions which belong to the + workflow with the given name. The method returns + executions of all workflow revisions. Returned + executions are ordered by their start time (newest + first). + + Args: + request (google.cloud.workflows.executions_v1.types.ListExecutionsRequest): + The request object. Request for the + [ListExecutions][] + method. + parent (str): + Required. Name of the workflow for + which the executions should be listed. + Format: + projects/{project}/locations/{location}/workflows/{workflow} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows.executions_v1.services.executions.pagers.ListExecutionsPager: + Response for the + [ListExecutions][google.cloud.workflows.executions.v1.Executions.ListExecutions] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a executions.ListExecutionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, executions.ListExecutionsRequest): + request = executions.ListExecutionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_executions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExecutionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def create_execution( + self, + request: executions.CreateExecutionRequest = None, + *, + parent: str = None, + execution: executions.Execution = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> executions.Execution: + r"""Creates a new execution using the latest revision of + the given workflow. + + Args: + request (google.cloud.workflows.executions_v1.types.CreateExecutionRequest): + The request object. Request for the + [CreateExecution][google.cloud.workflows.executions.v1.Executions.CreateExecution] + method. + parent (str): + Required. Name of the workflow for + which an execution should be created. + Format: + projects/{project}/locations/{location}/workflows/{workflow} + The latest revision of the workflow will + be used. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + execution (google.cloud.workflows.executions_v1.types.Execution): + Required. Execution to be created. + This corresponds to the ``execution`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows.executions_v1.types.Execution: + A running instance of a + [Workflow](/workflows/docs/reference/rest/v1/projects.locations.workflows). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, execution]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a executions.CreateExecutionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, executions.CreateExecutionRequest): + request = executions.CreateExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if execution is not None: + request.execution = execution + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_execution] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_execution( + self, + request: executions.GetExecutionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> executions.Execution: + r"""Returns an execution of the given name. + + Args: + request (google.cloud.workflows.executions_v1.types.GetExecutionRequest): + The request object. Request for the + [GetExecution][google.cloud.workflows.executions.v1.Executions.GetExecution] + method. + name (str): + Required. Name of the execution to be + retrieved. Format: + projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows.executions_v1.types.Execution: + A running instance of a + [Workflow](/workflows/docs/reference/rest/v1/projects.locations.workflows). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a executions.GetExecutionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, executions.GetExecutionRequest): + request = executions.GetExecutionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_execution] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_execution( + self, + request: executions.CancelExecutionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> executions.Execution: + r"""Cancels an execution of the given name. + + Args: + request (google.cloud.workflows.executions_v1.types.CancelExecutionRequest): + The request object. Request for the + [CancelExecution][google.cloud.workflows.executions.v1.Executions.CancelExecution] + method. + name (str): + Required. Name of the execution to be + cancelled. Format: + projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.workflows.executions_v1.types.Execution:
+                A running instance of a
+                [Workflow](/workflows/docs/reference/rest/v1/projects.locations.workflows).
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in an executions.CancelExecutionRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, executions.CancelExecutionRequest):
+            request = executions.CancelExecutionRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.cancel_execution]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-workflows",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("ExecutionsClient",)
diff --git a/google/cloud/workflows/executions_v1/services/executions/pagers.py b/google/cloud/workflows/executions_v1/services/executions/pagers.py
new file mode 100644
index 0000000..fd31ddd
--- /dev/null
+++ b/google/cloud/workflows/executions_v1/services/executions/pagers.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import (
+    Any,
+    AsyncIterable,
+    Awaitable,
+    Callable,
+    Iterable,
+    Sequence,
+    Tuple,
+    Optional,
+)
+
+from google.cloud.workflows.executions_v1.types import executions
+
+
+class ListExecutionsPager:
+    """A pager for iterating through ``list_executions`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.workflows.executions_v1.types.ListExecutionsResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``executions`` field.
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListExecutions`` requests and continue to iterate + through the ``executions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workflows.executions_v1.types.ListExecutionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., executions.ListExecutionsResponse], + request: executions.ListExecutionsRequest, + response: executions.ListExecutionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workflows.executions_v1.types.ListExecutionsRequest): + The initial request object. + response (google.cloud.workflows.executions_v1.types.ListExecutionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = executions.ListExecutionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[executions.ListExecutionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[executions.Execution]: + for page in self.pages: + yield from page.executions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExecutionsAsyncPager: + """A pager for iterating through ``list_executions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workflows.executions_v1.types.ListExecutionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``executions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListExecutions`` requests and continue to iterate + through the ``executions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workflows.executions_v1.types.ListExecutionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[executions.ListExecutionsResponse]], + request: executions.ListExecutionsRequest, + response: executions.ListExecutionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workflows.executions_v1.types.ListExecutionsRequest): + The initial request object. + response (google.cloud.workflows.executions_v1.types.ListExecutionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = executions.ListExecutionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[executions.ListExecutionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[executions.Execution]: + async def async_generator(): + async for page in self.pages: + for response in page.executions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/workflows/executions_v1/services/executions/transports/__init__.py b/google/cloud/workflows/executions_v1/services/executions/transports/__init__.py new file mode 100644 index 0000000..abab840 --- /dev/null +++ b/google/cloud/workflows/executions_v1/services/executions/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import ExecutionsTransport +from .grpc import ExecutionsGrpcTransport +from .grpc_asyncio import ExecutionsGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ExecutionsTransport]] +_transport_registry["grpc"] = ExecutionsGrpcTransport +_transport_registry["grpc_asyncio"] = ExecutionsGrpcAsyncIOTransport + +__all__ = ( + "ExecutionsTransport", + "ExecutionsGrpcTransport", + "ExecutionsGrpcAsyncIOTransport", +) diff --git a/google/cloud/workflows/executions_v1/services/executions/transports/base.py b/google/cloud/workflows/executions_v1/services/executions/transports/base.py new file mode 100644 index 0000000..5cf9562 --- /dev/null +++ b/google/cloud/workflows/executions_v1/services/executions/transports/base.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.workflows.executions_v1.types import executions
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-workflows",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class ExecutionsTransport(abc.ABC):
+    """Abstract transport class for Executions."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    def __init__(
+        self,
+        *,
+        host: str = "workflowexecutions.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        # Save the scopes.
+        self._scopes = scopes or self.AUTH_SCOPES
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = auth.load_credentials_from_file(
+                credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = auth.default(
+                scopes=self._scopes, quota_project_id=quota_project_id
+            )
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
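+        # Each entry below pairs an RPC with gapic_v1.method.wrap_method, which
+        # layers the default retry/timeout policy onto every call and attaches
+        # the client_info user-agent metadata.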
+ self._wrapped_methods = { + self.list_executions: gapic_v1.method.wrap_method( + self.list_executions, default_timeout=None, client_info=client_info, + ), + self.create_execution: gapic_v1.method.wrap_method( + self.create_execution, default_timeout=None, client_info=client_info, + ), + self.get_execution: gapic_v1.method.wrap_method( + self.get_execution, default_timeout=None, client_info=client_info, + ), + self.cancel_execution: gapic_v1.method.wrap_method( + self.cancel_execution, default_timeout=None, client_info=client_info, + ), + } + + @property + def list_executions( + self, + ) -> typing.Callable[ + [executions.ListExecutionsRequest], + typing.Union[ + executions.ListExecutionsResponse, + typing.Awaitable[executions.ListExecutionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_execution( + self, + ) -> typing.Callable[ + [executions.CreateExecutionRequest], + typing.Union[executions.Execution, typing.Awaitable[executions.Execution]], + ]: + raise NotImplementedError() + + @property + def get_execution( + self, + ) -> typing.Callable[ + [executions.GetExecutionRequest], + typing.Union[executions.Execution, typing.Awaitable[executions.Execution]], + ]: + raise NotImplementedError() + + @property + def cancel_execution( + self, + ) -> typing.Callable[ + [executions.CancelExecutionRequest], + typing.Union[executions.Execution, typing.Awaitable[executions.Execution]], + ]: + raise NotImplementedError() + + +__all__ = ("ExecutionsTransport",) diff --git a/google/cloud/workflows/executions_v1/services/executions/transports/grpc.py b/google/cloud/workflows/executions_v1/services/executions/transports/grpc.py new file mode 100644 index 0000000..205e953 --- /dev/null +++ b/google/cloud/workflows/executions_v1/services/executions/transports/grpc.py @@ -0,0 +1,337 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.workflows.executions_v1.types import executions + +from .base import ExecutionsTransport, DEFAULT_CLIENT_INFO + + +class ExecutionsGrpcTransport(ExecutionsTransport): + """gRPC backend transport for Executions. + + Executions is used to start and manage running instances of + [Workflows][google.cloud.workflows.v1.Workflow] called executions. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "workflowexecutions.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "workflowexecutions.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def list_executions( + self, + ) -> Callable[ + [executions.ListExecutionsRequest], executions.ListExecutionsResponse + ]: + r"""Return a callable for the list executions method over gRPC. + + Returns a list of executions which belong to the + workflow with the given name. 
The method returns + executions of all workflow revisions. Returned + executions are ordered by their start time (newest + first). + + Returns: + Callable[[~.ListExecutionsRequest], + ~.ListExecutionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_executions" not in self._stubs: + self._stubs["list_executions"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.executions.v1.Executions/ListExecutions", + request_serializer=executions.ListExecutionsRequest.serialize, + response_deserializer=executions.ListExecutionsResponse.deserialize, + ) + return self._stubs["list_executions"] + + @property + def create_execution( + self, + ) -> Callable[[executions.CreateExecutionRequest], executions.Execution]: + r"""Return a callable for the create execution method over gRPC. + + Creates a new execution using the latest revision of + the given workflow. + + Returns: + Callable[[~.CreateExecutionRequest], + ~.Execution]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_execution" not in self._stubs: + self._stubs["create_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.executions.v1.Executions/CreateExecution", + request_serializer=executions.CreateExecutionRequest.serialize, + response_deserializer=executions.Execution.deserialize, + ) + return self._stubs["create_execution"] + + @property + def get_execution( + self, + ) -> Callable[[executions.GetExecutionRequest], executions.Execution]: + r"""Return a callable for the get execution method over gRPC. + + Returns an execution of the given name. + + Returns: + Callable[[~.GetExecutionRequest], + ~.Execution]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_execution" not in self._stubs: + self._stubs["get_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.executions.v1.Executions/GetExecution", + request_serializer=executions.GetExecutionRequest.serialize, + response_deserializer=executions.Execution.deserialize, + ) + return self._stubs["get_execution"] + + @property + def cancel_execution( + self, + ) -> Callable[[executions.CancelExecutionRequest], executions.Execution]: + r"""Return a callable for the cancel execution method over gRPC. + + Cancels an execution of the given name. + + Returns: + Callable[[~.CancelExecutionRequest], + ~.Execution]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
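+        # The stub is created lazily on first access and cached in
+        # self._stubs, so later property reads reuse the same callable.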
+ if "cancel_execution" not in self._stubs: + self._stubs["cancel_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.executions.v1.Executions/CancelExecution", + request_serializer=executions.CancelExecutionRequest.serialize, + response_deserializer=executions.Execution.deserialize, + ) + return self._stubs["cancel_execution"] + + +__all__ = ("ExecutionsGrpcTransport",) diff --git a/google/cloud/workflows/executions_v1/services/executions/transports/grpc_asyncio.py b/google/cloud/workflows/executions_v1/services/executions/transports/grpc_asyncio.py new file mode 100644 index 0000000..930a549 --- /dev/null +++ b/google/cloud/workflows/executions_v1/services/executions/transports/grpc_asyncio.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.workflows.executions_v1.types import executions + +from .base import ExecutionsTransport, DEFAULT_CLIENT_INFO +from .grpc import ExecutionsGrpcTransport + + +class ExecutionsGrpcAsyncIOTransport(ExecutionsTransport): + """gRPC AsyncIO backend transport for Executions. + + Executions is used to start and manage running instances of + [Workflows][google.cloud.workflows.v1.Workflow] called executions. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "workflowexecutions.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. 
These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "workflowexecutions.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_executions( + self, + ) -> Callable[ + [executions.ListExecutionsRequest], Awaitable[executions.ListExecutionsResponse] + ]: + r"""Return a callable for the list executions method over gRPC. + + Returns a list of executions which belong to the + workflow with the given name. The method returns + executions of all workflow revisions. Returned + executions are ordered by their start time (newest + first). + + Returns: + Callable[[~.ListExecutionsRequest], + Awaitable[~.ListExecutionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
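+        # Unlike the sync transport, invoking this stub returns an awaitable
+        # call object; the stub itself is still cached in self._stubs.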
+ if "list_executions" not in self._stubs: + self._stubs["list_executions"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.executions.v1.Executions/ListExecutions", + request_serializer=executions.ListExecutionsRequest.serialize, + response_deserializer=executions.ListExecutionsResponse.deserialize, + ) + return self._stubs["list_executions"] + + @property + def create_execution( + self, + ) -> Callable[[executions.CreateExecutionRequest], Awaitable[executions.Execution]]: + r"""Return a callable for the create execution method over gRPC. + + Creates a new execution using the latest revision of + the given workflow. + + Returns: + Callable[[~.CreateExecutionRequest], + Awaitable[~.Execution]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_execution" not in self._stubs: + self._stubs["create_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.executions.v1.Executions/CreateExecution", + request_serializer=executions.CreateExecutionRequest.serialize, + response_deserializer=executions.Execution.deserialize, + ) + return self._stubs["create_execution"] + + @property + def get_execution( + self, + ) -> Callable[[executions.GetExecutionRequest], Awaitable[executions.Execution]]: + r"""Return a callable for the get execution method over gRPC. + + Returns an execution of the given name. + + Returns: + Callable[[~.GetExecutionRequest], + Awaitable[~.Execution]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_execution" not in self._stubs: + self._stubs["get_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.executions.v1.Executions/GetExecution", + request_serializer=executions.GetExecutionRequest.serialize, + response_deserializer=executions.Execution.deserialize, + ) + return self._stubs["get_execution"] + + @property + def cancel_execution( + self, + ) -> Callable[[executions.CancelExecutionRequest], Awaitable[executions.Execution]]: + r"""Return a callable for the cancel execution method over gRPC. + + Cancels an execution of the given name. + + Returns: + Callable[[~.CancelExecutionRequest], + Awaitable[~.Execution]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_execution" not in self._stubs: + self._stubs["cancel_execution"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.executions.v1.Executions/CancelExecution", + request_serializer=executions.CancelExecutionRequest.serialize, + response_deserializer=executions.Execution.deserialize, + ) + return self._stubs["cancel_execution"] + + +__all__ = ("ExecutionsGrpcAsyncIOTransport",) diff --git a/google/cloud/workflows/executions_v1/types/__init__.py b/google/cloud/workflows/executions_v1/types/__init__.py new file mode 100644 index 0000000..3ea6e8b --- /dev/null +++ b/google/cloud/workflows/executions_v1/types/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .executions import ( + CancelExecutionRequest, + CreateExecutionRequest, + Execution, + GetExecutionRequest, + ListExecutionsRequest, + ListExecutionsResponse, + ExecutionView, +) + +__all__ = ( + "CancelExecutionRequest", + "CreateExecutionRequest", + "Execution", + "GetExecutionRequest", + "ListExecutionsRequest", + "ListExecutionsResponse", + "ExecutionView", +) diff --git a/google/cloud/workflows/executions_v1/types/executions.py b/google/cloud/workflows/executions_v1/types/executions.py new file mode 100644 index 0000000..d0ca7a1 --- /dev/null +++ b/google/cloud/workflows/executions_v1/types/executions.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.workflows.executions.v1", + manifest={ + "ExecutionView", + "Execution", + "ListExecutionsRequest", + "ListExecutionsResponse", + "CreateExecutionRequest", + "GetExecutionRequest", + "CancelExecutionRequest", + }, +) + + +class ExecutionView(proto.Enum): + r"""Defines possible views for execution resource.""" + EXECUTION_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + +class Execution(proto.Message): + r"""A running instance of a + `Workflow `__. + + Attributes: + name (str): + Output only. The resource name of the + execution. Format: + projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution} + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Marks the beginning of + execution. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Marks the end of execution, + successful or not. 
+ state (google.cloud.workflows.executions_v1.types.Execution.State): + Output only. Current state of the execution. + argument (str): + Input parameters of the execution represented + as a JSON string. The size limit is 32KB. + result (str): + Output only. Output of the execution represented as a JSON + string. The value can only be present if the execution's + state is ``SUCCEEDED``. + error (google.cloud.workflows.executions_v1.types.Execution.Error): + Output only. The error which caused the execution to finish + prematurely. The value is only present if the execution's + state is ``FAILED`` or ``CANCELLED``. + workflow_revision_id (str): + Output only. Revision of the workflow this + execution is using. + """ + + class State(proto.Enum): + r"""Describes the current state of the execution. More states may + be added in the future. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLED = 4 + + class Error(proto.Message): + r"""Error describes why the execution was abnormally terminated. + + Attributes: + payload (str): + Error payload returned by the execution, + represented as a JSON string. + context (str): + Human readable error context, helpful for + debugging purposes. + """ + + payload = proto.Field(proto.STRING, number=1) + + context = proto.Field(proto.STRING, number=2) + + name = proto.Field(proto.STRING, number=1) + + start_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + state = proto.Field(proto.ENUM, number=4, enum=State,) + + argument = proto.Field(proto.STRING, number=5) + + result = proto.Field(proto.STRING, number=6) + + error = proto.Field(proto.MESSAGE, number=7, message=Error,) + + workflow_revision_id = proto.Field(proto.STRING, number=8) + + +class ListExecutionsRequest(proto.Message): + r"""Request for the [ListExecutions][] method. + + Attributes: + parent (str): + Required. Name of the workflow for which the + executions should be listed. Format: + projects/{project}/locations/{location}/workflows/{workflow} + page_size (int): + Maximum number of executions to return per + call. Max supported value depends on the + selected Execution view: it's 10000 for BASIC + and 100 for FULL. The default value used if the + field is not specified is 100, regardless of the + selected view. Values greater than the max value + will be coerced down to it. + page_token (str): + A page token, received from a previous ``ListExecutions`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListExecutions`` must match the call that provided the + page token. + view (google.cloud.workflows.executions_v1.types.ExecutionView): + Optional. A view defining which fields should + be filled in the returned executions. The API + will default to the BASIC view. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + view = proto.Field(proto.ENUM, number=4, enum="ExecutionView",) + + +class ListExecutionsResponse(proto.Message): + r"""Response for the + [ListExecutions][google.cloud.workflows.executions.v1.Executions.ListExecutions] + method. + + Attributes: + executions (Sequence[google.cloud.workflows.executions_v1.types.Execution]): + The executions which match the request. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. 
If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + executions = proto.RepeatedField(proto.MESSAGE, number=1, message="Execution",) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class CreateExecutionRequest(proto.Message): + r"""Request for the + [CreateExecution][google.cloud.workflows.executions.v1.Executions.CreateExecution] + method. + + Attributes: + parent (str): + Required. Name of the workflow for which an + execution should be created. Format: + projects/{project}/locations/{location}/workflows/{workflow} + The latest revision of the workflow will be + used. + execution (google.cloud.workflows.executions_v1.types.Execution): + Required. Execution to be created. + """ + + parent = proto.Field(proto.STRING, number=1) + + execution = proto.Field(proto.MESSAGE, number=2, message="Execution",) + + +class GetExecutionRequest(proto.Message): + r"""Request for the + [GetExecution][google.cloud.workflows.executions.v1.Executions.GetExecution] + method. + + Attributes: + name (str): + Required. Name of the execution to be + retrieved. Format: + projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution} + view (google.cloud.workflows.executions_v1.types.ExecutionView): + Optional. A view defining which fields should + be filled in the returned execution. The API + will default to the FULL view. + """ + + name = proto.Field(proto.STRING, number=1) + + view = proto.Field(proto.ENUM, number=2, enum="ExecutionView",) + + +class CancelExecutionRequest(proto.Message): + r"""Request for the + [CancelExecution][google.cloud.workflows.executions.v1.Executions.CancelExecution] + method. + + Attributes: + name (str): + Required. Name of the execution to be + cancelled. Format: + projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution} + """ + + name = proto.Field(proto.STRING, number=1) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/workflows_v1/__init__.py b/google/cloud/workflows_v1/__init__.py new file mode 100644 index 0000000..91f1cc1 --- /dev/null +++ b/google/cloud/workflows_v1/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
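To make the shape of these request messages concrete, here is an illustrative
construction of a ``CreateExecutionRequest`` (a sketch only; the parent path
is a placeholder):

    import json

    from google.cloud.workflows.executions_v1.types import (
        CreateExecutionRequest,
        Execution,
    )

    request = CreateExecutionRequest(
        parent="projects/my-project/locations/us-central1/workflows/my-workflow",
        # Per the Execution message above, `argument` carries the execution's
        # input as a JSON string (size limit 32KB).
        execution=Execution(argument=json.dumps({"searchTerm": "cloud"})),
    )

Proto-plus messages accept field values as keyword arguments, so the nested
``Execution`` can be built inline as shown.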
+# + +from .services.workflows import WorkflowsClient +from .types.workflows import CreateWorkflowRequest +from .types.workflows import DeleteWorkflowRequest +from .types.workflows import GetWorkflowRequest +from .types.workflows import ListWorkflowsRequest +from .types.workflows import ListWorkflowsResponse +from .types.workflows import OperationMetadata +from .types.workflows import UpdateWorkflowRequest +from .types.workflows import Workflow + + +__all__ = ( + "CreateWorkflowRequest", + "DeleteWorkflowRequest", + "GetWorkflowRequest", + "ListWorkflowsRequest", + "ListWorkflowsResponse", + "OperationMetadata", + "UpdateWorkflowRequest", + "Workflow", + "WorkflowsClient", +) diff --git a/google/cloud/workflows_v1/py.typed b/google/cloud/workflows_v1/py.typed new file mode 100644 index 0000000..b753e6a --- /dev/null +++ b/google/cloud/workflows_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-workflows package uses inline types. diff --git a/google/cloud/workflows_v1/services/__init__.py b/google/cloud/workflows_v1/services/__init__.py new file mode 100644 index 0000000..42ffdf2 --- /dev/null +++ b/google/cloud/workflows_v1/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/google/cloud/workflows_v1/services/workflows/__init__.py b/google/cloud/workflows_v1/services/workflows/__init__.py new file mode 100644 index 0000000..3afd340 --- /dev/null +++ b/google/cloud/workflows_v1/services/workflows/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import WorkflowsClient +from .async_client import WorkflowsAsyncClient + +__all__ = ( + "WorkflowsClient", + "WorkflowsAsyncClient", +) diff --git a/google/cloud/workflows_v1/services/workflows/async_client.py b/google/cloud/workflows_v1/services/workflows/async_client.py new file mode 100644 index 0000000..778f6c5 --- /dev/null +++ b/google/cloud/workflows_v1/services/workflows/async_client.py @@ -0,0 +1,660 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.workflows_v1.services.workflows import pagers +from google.cloud.workflows_v1.types import workflows +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import WorkflowsTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import WorkflowsGrpcAsyncIOTransport +from .client import WorkflowsClient + + +class WorkflowsAsyncClient: + """Workflows is used to deploy and execute workflow programs. + Workflows makes sure the program executes reliably, despite + hardware and networking interruptions. + """ + + _client: WorkflowsClient + + DEFAULT_ENDPOINT = WorkflowsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = WorkflowsClient.DEFAULT_MTLS_ENDPOINT + + workflow_path = staticmethod(WorkflowsClient.workflow_path) + parse_workflow_path = staticmethod(WorkflowsClient.parse_workflow_path) + + common_billing_account_path = staticmethod( + WorkflowsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + WorkflowsClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(WorkflowsClient.common_folder_path) + parse_common_folder_path = staticmethod(WorkflowsClient.parse_common_folder_path) + + common_organization_path = staticmethod(WorkflowsClient.common_organization_path) + parse_common_organization_path = staticmethod( + WorkflowsClient.parse_common_organization_path + ) + + common_project_path = staticmethod(WorkflowsClient.common_project_path) + parse_common_project_path = staticmethod(WorkflowsClient.parse_common_project_path) + + common_location_path = staticmethod(WorkflowsClient.common_location_path) + parse_common_location_path = staticmethod( + WorkflowsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowsAsyncClient: The constructed client. 
+ """ + return WorkflowsClient.from_service_account_info.__func__(WorkflowsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowsAsyncClient: The constructed client. + """ + return WorkflowsClient.from_service_account_file.__func__(WorkflowsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> WorkflowsTransport: + """Return the transport used by the client instance. + + Returns: + WorkflowsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(WorkflowsClient).get_transport_class, type(WorkflowsClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, WorkflowsTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the workflows client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.WorkflowsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = WorkflowsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_workflows( + self, + request: workflows.ListWorkflowsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkflowsAsyncPager: + r"""Lists Workflows in a given project and location. + The default order is not specified. + + Args: + request (:class:`google.cloud.workflows_v1.types.ListWorkflowsRequest`): + The request object. 
Request for the + [ListWorkflows][google.cloud.workflows.v1.Workflows.ListWorkflows] + method. + parent (:class:`str`): + Required. Project and location from + which the workflows should be listed. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows_v1.services.workflows.pagers.ListWorkflowsAsyncPager: + Response for the + [ListWorkflows][google.cloud.workflows.v1.Workflows.ListWorkflows] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workflows.ListWorkflowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_workflows, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListWorkflowsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_workflow( + self, + request: workflows.GetWorkflowRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflows.Workflow: + r"""Gets details of a single Workflow. + + Args: + request (:class:`google.cloud.workflows_v1.types.GetWorkflowRequest`): + The request object. Request for the + [GetWorkflow][google.cloud.workflows.v1.Workflows.GetWorkflow] + method. + name (:class:`str`): + Required. Name of the workflow which + information should be retrieved. Format: + projects/{project}/locations/{location}/workflows/{workflow} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows_v1.types.Workflow: + Workflow program to be executed by + Workflows. 
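
A minimal driver for the two read methods above might look like this (resource names are placeholders, and Application Default Credentials are assumed to be available):

    import asyncio

    from google.cloud.workflows_v1 import WorkflowsAsyncClient


    async def main() -> None:
        client = WorkflowsAsyncClient()
        parent = "projects/my-project/locations/us-central1"  # placeholder

        # The async pager issues follow-up ListWorkflows calls lazily.
        async for workflow in await client.list_workflows(parent=parent):
            detail = await client.get_workflow(name=workflow.name)
            print(detail.name, detail.state)


    asyncio.run(main())
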
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workflows.GetWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_workflow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_workflow( + self, + request: workflows.CreateWorkflowRequest = None, + *, + parent: str = None, + workflow: workflows.Workflow = None, + workflow_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new workflow. If a workflow with the specified name + already exists in the specified project and location, the long + running operation will return + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error. + + Args: + request (:class:`google.cloud.workflows_v1.types.CreateWorkflowRequest`): + The request object. Request for the + [CreateWorkflow][google.cloud.workflows.v1.Workflows.CreateWorkflow] + method. + parent (:class:`str`): + Required. Project and location in + which the workflow should be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workflow (:class:`google.cloud.workflows_v1.types.Workflow`): + Required. Workflow to be created. + This corresponds to the ``workflow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workflow_id (:class:`str`): + Required. The ID of the workflow to be created. It has + to fulfill the following requirements: + + - Must contain only letters, numbers, underscores and + hyphens. + - Must start with a letter. + - Must be between 1-64 characters. + - Must end with a number or a letter. + - Must be unique within the customer project and + location. + + This corresponds to the ``workflow_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workflows_v1.types.Workflow` + Workflow program to be executed by Workflows. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workflow, workflow_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workflows.CreateWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if workflow is not None: + request.workflow = workflow + if workflow_id is not None: + request.workflow_id = workflow_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workflow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workflows.Workflow, + metadata_type=workflows.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_workflow( + self, + request: workflows.DeleteWorkflowRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a workflow with the specified name. + This method also cancels and deletes all running + executions of the workflow. + + Args: + request (:class:`google.cloud.workflows_v1.types.DeleteWorkflowRequest`): + The request object. Request for the + [DeleteWorkflow][google.cloud.workflows.v1.Workflows.DeleteWorkflow] + method. + name (:class:`str`): + Required. Name of the workflow to be + deleted. Format: + projects/{project}/locations/{location}/workflows/{workflow} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
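
For the long-running methods, the returned AsyncOperation must itself be awaited to obtain the final resource. A sketch for create_workflow (the names and YAML source are placeholders, and the version-level Workflow re-export is an assumption):

    import asyncio

    from google.cloud import workflows_v1


    async def main() -> None:
        client = workflows_v1.WorkflowsAsyncClient()
        operation = await client.create_workflow(
            parent="projects/my-project/locations/us-central1",  # placeholder
            workflow=workflows_v1.Workflow(
                source_contents="main:\n  steps:\n  - done:\n      return: ok\n"
            ),
            workflow_id="my-workflow",  # must satisfy the ID rules above
        )
        created = await operation.result()  # resolves to the created Workflow
        print(created.name)


    asyncio.run(main())
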
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workflows.DeleteWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_workflow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty.Empty, + metadata_type=workflows.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_workflow( + self, + request: workflows.UpdateWorkflowRequest = None, + *, + workflow: workflows.Workflow = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing workflow. + Running this method has no impact on already running + executions of the workflow. A new revision of the + workflow may be created as a result of a successful + update operation. In that case, such revision will be + used in new workflow executions. + + Args: + request (:class:`google.cloud.workflows_v1.types.UpdateWorkflowRequest`): + The request object. Request for the + [UpdateWorkflow][google.cloud.workflows.v1.Workflows.UpdateWorkflow] + method. + workflow (:class:`google.cloud.workflows_v1.types.Workflow`): + Required. Workflow to be updated. + This corresponds to the ``workflow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + List of fields to be updated. If not + present, the entire workflow will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workflows_v1.types.Workflow` + Workflow program to be executed by Workflows. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workflow, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = workflows.UpdateWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if workflow is not None: + request.workflow = workflow + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_workflow, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workflow.name", request.workflow.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + workflows.Workflow, + metadata_type=workflows.OperationMetadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-workflows",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("WorkflowsAsyncClient",) diff --git a/google/cloud/workflows_v1/services/workflows/client.py b/google/cloud/workflows_v1/services/workflows/client.py new file mode 100644 index 0000000..aab2e48 --- /dev/null +++ b/google/cloud/workflows_v1/services/workflows/client.py @@ -0,0 +1,843 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
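
Rounding out the async surface defined above, a field-masked update might be sketched as follows (the resource name and description are placeholders):

    from google.cloud import workflows_v1
    from google.protobuf import field_mask_pb2


    async def set_description(client: workflows_v1.WorkflowsAsyncClient) -> None:
        workflow = await client.get_workflow(
            name="projects/my-project/locations/us-central1/workflows/my-workflow"
        )
        workflow.description = "nightly batch pipeline"  # placeholder
        operation = await client.update_workflow(
            workflow=workflow,
            # Restrict the write to one field; without an update_mask the
            # entire workflow is overwritten, per the docstring above.
            update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        )
        await operation.result()
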
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.cloud.workflows_v1.services.workflows import pagers
+from google.cloud.workflows_v1.types import workflows
+from google.protobuf import empty_pb2 as empty  # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask  # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
+
+from .transports.base import WorkflowsTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import WorkflowsGrpcTransport
+from .transports.grpc_asyncio import WorkflowsGrpcAsyncIOTransport
+
+
+class WorkflowsClientMeta(type):
+    """Metaclass for the Workflows client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[WorkflowsTransport]]
+    _transport_registry["grpc"] = WorkflowsGrpcTransport
+    _transport_registry["grpc_asyncio"] = WorkflowsGrpcAsyncIOTransport
+
+    def get_transport_class(cls, label: str = None,) -> Type[WorkflowsTransport]:
+        """Return an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class WorkflowsClient(metaclass=WorkflowsClientMeta):
+    """Workflows is used to deploy and execute workflow programs.
+    Workflows makes sure the program executes reliably, despite
+    hardware and networking interruptions.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "workflows.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> WorkflowsTransport: + """Return the transport used by the client instance. + + Returns: + WorkflowsTransport: The transport used by the client instance. 
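
The endpoint rewriting implemented by _get_default_mtls_endpoint above can be illustrated with a few inputs (values chosen only to exercise each branch):

    WorkflowsClient._get_default_mtls_endpoint("workflows.googleapis.com")
    # -> "workflows.mtls.googleapis.com"

    WorkflowsClient._get_default_mtls_endpoint("workflows.sandbox.googleapis.com")
    # -> "workflows.mtls.sandbox.googleapis.com"

    WorkflowsClient._get_default_mtls_endpoint("example.com")
    # -> "example.com"  (non-googleapis hosts pass through unchanged)
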
+        """
+        return self._transport
+
+    @staticmethod
+    def workflow_path(project: str, location: str, workflow: str,) -> str:
+        """Return a fully-qualified workflow string."""
+        return "projects/{project}/locations/{location}/workflows/{workflow}".format(
+            project=project, location=location, workflow=workflow,
+        )
+
+    @staticmethod
+    def parse_workflow_path(path: str) -> Dict[str, str]:
+        """Parse a workflow path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Return a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Return a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Return a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Return a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Return a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, WorkflowsTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the workflows client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, WorkflowsTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
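
The path helpers above are pure string utilities; a quick round trip with placeholder values shows the expected shape:

    path = WorkflowsClient.workflow_path("my-project", "us-central1", "my-wf")
    # 'projects/my-project/locations/us-central1/workflows/my-wf'

    WorkflowsClient.parse_workflow_path(path)
    # {'project': 'my-project', 'location': 'us-central1', 'workflow': 'my-wf'}
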
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, WorkflowsTransport): + # transport is a WorkflowsTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_workflows( + self, + request: workflows.ListWorkflowsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkflowsPager: + r"""Lists Workflows in a given project and location. + The default order is not specified. + + Args: + request (google.cloud.workflows_v1.types.ListWorkflowsRequest): + The request object. Request for the + [ListWorkflows][google.cloud.workflows.v1.Workflows.ListWorkflows] + method. + parent (str): + Required. Project and location from + which the workflows should be listed. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows_v1.services.workflows.pagers.ListWorkflowsPager: + Response for the + [ListWorkflows][google.cloud.workflows.v1.Workflows.ListWorkflows] + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workflows.ListWorkflowsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workflows.ListWorkflowsRequest): + request = workflows.ListWorkflowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_workflows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListWorkflowsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_workflow( + self, + request: workflows.GetWorkflowRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflows.Workflow: + r"""Gets details of a single Workflow. + + Args: + request (google.cloud.workflows_v1.types.GetWorkflowRequest): + The request object. Request for the + [GetWorkflow][google.cloud.workflows.v1.Workflows.GetWorkflow] + method. + name (str): + Required. Name of the workflow which + information should be retrieved. Format: + projects/{project}/locations/{location}/workflows/{workflow} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.workflows_v1.types.Workflow: + Workflow program to be executed by + Workflows. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workflows.GetWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workflows.GetWorkflowRequest): + request = workflows.GetWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_workflow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_workflow( + self, + request: workflows.CreateWorkflowRequest = None, + *, + parent: str = None, + workflow: workflows.Workflow = None, + workflow_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new workflow. If a workflow with the specified name + already exists in the specified project and location, the long + running operation will return + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error. + + Args: + request (google.cloud.workflows_v1.types.CreateWorkflowRequest): + The request object. Request for the + [CreateWorkflow][google.cloud.workflows.v1.Workflows.CreateWorkflow] + method. + parent (str): + Required. Project and location in + which the workflow should be created. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ workflow (google.cloud.workflows_v1.types.Workflow): + Required. Workflow to be created. + This corresponds to the ``workflow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + workflow_id (str): + Required. The ID of the workflow to be created. It has + to fulfill the following requirements: + + - Must contain only letters, numbers, underscores and + hyphens. + - Must start with a letter. + - Must be between 1-64 characters. + - Must end with a number or a letter. + - Must be unique within the customer project and + location. + + This corresponds to the ``workflow_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workflows_v1.types.Workflow` + Workflow program to be executed by Workflows. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, workflow, workflow_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workflows.CreateWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workflows.CreateWorkflowRequest): + request = workflows.CreateWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if workflow is not None: + request.workflow = workflow + if workflow_id is not None: + request.workflow_id = workflow_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_workflow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workflows.Workflow, + metadata_type=workflows.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_workflow( + self, + request: workflows.DeleteWorkflowRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a workflow with the specified name. + This method also cancels and deletes all running + executions of the workflow. + + Args: + request (google.cloud.workflows_v1.types.DeleteWorkflowRequest): + The request object. Request for the + [DeleteWorkflow][google.cloud.workflows.v1.Workflows.DeleteWorkflow] + method. 
+ name (str): + Required. Name of the workflow to be + deleted. Format: + projects/{project}/locations/{location}/workflows/{workflow} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workflows.DeleteWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workflows.DeleteWorkflowRequest): + request = workflows.DeleteWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_workflow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty.Empty, + metadata_type=workflows.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_workflow( + self, + request: workflows.UpdateWorkflowRequest = None, + *, + workflow: workflows.Workflow = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing workflow. + Running this method has no impact on already running + executions of the workflow. A new revision of the + workflow may be created as a result of a successful + update operation. In that case, such revision will be + used in new workflow executions. + + Args: + request (google.cloud.workflows_v1.types.UpdateWorkflowRequest): + The request object. Request for the + [UpdateWorkflow][google.cloud.workflows.v1.Workflows.UpdateWorkflow] + method. + workflow (google.cloud.workflows_v1.types.Workflow): + Required. Workflow to be updated. 
+ This corresponds to the ``workflow`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + List of fields to be updated. If not + present, the entire workflow will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.workflows_v1.types.Workflow` + Workflow program to be executed by Workflows. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([workflow, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workflows.UpdateWorkflowRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workflows.UpdateWorkflowRequest): + request = workflows.UpdateWorkflowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if workflow is not None: + request.workflow = workflow + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_workflow] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("workflow.name", request.workflow.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + workflows.Workflow, + metadata_type=workflows.OperationMetadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-workflows",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("WorkflowsClient",) diff --git a/google/cloud/workflows_v1/services/workflows/pagers.py b/google/cloud/workflows_v1/services/workflows/pagers.py new file mode 100644 index 0000000..bff7b2c --- /dev/null +++ b/google/cloud/workflows_v1/services/workflows/pagers.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
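
With the synchronous client now complete, a one-shot read is simply (placeholder resource name, Application Default Credentials assumed):

    from google.cloud.workflows_v1 import WorkflowsClient

    client = WorkflowsClient()
    workflow = client.get_workflow(
        name="projects/my-project/locations/us-central1/workflows/my-workflow"
    )
    print(workflow.state)
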
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.workflows_v1.types import workflows + + +class ListWorkflowsPager: + """A pager for iterating through ``list_workflows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workflows_v1.types.ListWorkflowsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``workflows`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkflows`` requests and continue to iterate + through the ``workflows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workflows_v1.types.ListWorkflowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workflows.ListWorkflowsResponse], + request: workflows.ListWorkflowsRequest, + response: workflows.ListWorkflowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workflows_v1.types.ListWorkflowsRequest): + The initial request object. + response (google.cloud.workflows_v1.types.ListWorkflowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workflows.ListWorkflowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[workflows.ListWorkflowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[workflows.Workflow]: + for page in self.pages: + yield from page.workflows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkflowsAsyncPager: + """A pager for iterating through ``list_workflows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.workflows_v1.types.ListWorkflowsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``workflows`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkflows`` requests and continue to iterate + through the ``workflows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.workflows_v1.types.ListWorkflowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
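
A sketch of both iteration styles the synchronous pager supports (the parent value is a placeholder):

    from google.cloud.workflows_v1 import WorkflowsClient

    client = WorkflowsClient()
    parent = "projects/my-project/locations/us-central1"

    # Per-item: additional pages are fetched transparently during iteration.
    for workflow in client.list_workflows(parent=parent):
        print(workflow.name)

    # Per-page: useful when page-level fields such as next_page_token matter.
    for page in client.list_workflows(parent=parent).pages:
        print(len(page.workflows))
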
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[workflows.ListWorkflowsResponse]], + request: workflows.ListWorkflowsRequest, + response: workflows.ListWorkflowsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.workflows_v1.types.ListWorkflowsRequest): + The initial request object. + response (google.cloud.workflows_v1.types.ListWorkflowsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workflows.ListWorkflowsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[workflows.ListWorkflowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[workflows.Workflow]: + async def async_generator(): + async for page in self.pages: + for response in page.workflows: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/workflows_v1/services/workflows/transports/__init__.py b/google/cloud/workflows_v1/services/workflows/transports/__init__.py new file mode 100644 index 0000000..ceeb985 --- /dev/null +++ b/google/cloud/workflows_v1/services/workflows/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import WorkflowsTransport +from .grpc import WorkflowsGrpcTransport +from .grpc_asyncio import WorkflowsGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[WorkflowsTransport]] +_transport_registry["grpc"] = WorkflowsGrpcTransport +_transport_registry["grpc_asyncio"] = WorkflowsGrpcAsyncIOTransport + +__all__ = ( + "WorkflowsTransport", + "WorkflowsGrpcTransport", + "WorkflowsGrpcAsyncIOTransport", +) diff --git a/google/cloud/workflows_v1/services/workflows/transports/base.py b/google/cloud/workflows_v1/services/workflows/transports/base.py new file mode 100644 index 0000000..2b54f1c --- /dev/null +++ b/google/cloud/workflows_v1/services/workflows/transports/base.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
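
Because ``pages`` on the async pager is an async generator, it is consumed with ``async for`` (a sketch, assuming an already constructed WorkflowsAsyncClient):

    async def dump_pages(client):
        pager = await client.list_workflows(
            parent="projects/my-project/locations/us-central1"  # placeholder
        )
        async for page in pager.pages:
            print(len(page.workflows), page.next_page_token)
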
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.api_core import operations_v1  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.workflows_v1.types import workflows
+from google.longrunning import operations_pb2 as operations  # type: ignore
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-workflows",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class WorkflowsTransport(abc.ABC):
+    """Abstract transport class for Workflows."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    def __init__(
+        self,
+        *,
+        host: str = "workflows.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        # Save the scopes.
+        self._scopes = scopes or self.AUTH_SCOPES
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = auth.load_credentials_from_file(
+                credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = auth.default(
+                scopes=self._scopes, quota_project_id=quota_project_id
+            )
+
+        # Save the credentials.
+ self._credentials = credentials + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_workflows: gapic_v1.method.wrap_method( + self.list_workflows, default_timeout=None, client_info=client_info, + ), + self.get_workflow: gapic_v1.method.wrap_method( + self.get_workflow, default_timeout=None, client_info=client_info, + ), + self.create_workflow: gapic_v1.method.wrap_method( + self.create_workflow, default_timeout=None, client_info=client_info, + ), + self.delete_workflow: gapic_v1.method.wrap_method( + self.delete_workflow, default_timeout=None, client_info=client_info, + ), + self.update_workflow: gapic_v1.method.wrap_method( + self.update_workflow, default_timeout=None, client_info=client_info, + ), + } + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_workflows( + self, + ) -> typing.Callable[ + [workflows.ListWorkflowsRequest], + typing.Union[ + workflows.ListWorkflowsResponse, + typing.Awaitable[workflows.ListWorkflowsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_workflow( + self, + ) -> typing.Callable[ + [workflows.GetWorkflowRequest], + typing.Union[workflows.Workflow, typing.Awaitable[workflows.Workflow]], + ]: + raise NotImplementedError() + + @property + def create_workflow( + self, + ) -> typing.Callable[ + [workflows.CreateWorkflowRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_workflow( + self, + ) -> typing.Callable[ + [workflows.DeleteWorkflowRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workflow( + self, + ) -> typing.Callable[ + [workflows.UpdateWorkflowRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + +__all__ = ("WorkflowsTransport",) diff --git a/google/cloud/workflows_v1/services/workflows/transports/grpc.py b/google/cloud/workflows_v1/services/workflows/transports/grpc.py new file mode 100644 index 0000000..8bf1104 --- /dev/null +++ b/google/cloud/workflows_v1/services/workflows/transports/grpc.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
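
The abstract transport above is concretized next. As a sketch (assuming Application Default Credentials are available for channel creation), a pre-built gRPC channel can be injected through an explicit transport instance:

    from google.cloud.workflows_v1 import WorkflowsClient
    from google.cloud.workflows_v1.services.workflows.transports import (
        WorkflowsGrpcTransport,
    )

    channel = WorkflowsGrpcTransport.create_channel("workflows.googleapis.com")
    transport = WorkflowsGrpcTransport(channel=channel)
    client = WorkflowsClient(transport=transport)
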
+#
+
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import grpc_helpers  # type: ignore
+from google.api_core import operations_v1  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.workflows_v1.types import workflows
+from google.longrunning import operations_pb2 as operations  # type: ignore
+
+from .base import WorkflowsTransport, DEFAULT_CLIENT_INFO
+
+
+class WorkflowsGrpcTransport(WorkflowsTransport):
+    """gRPC backend transport for Workflows.
+
+    Workflows is used to deploy and execute workflow programs.
+    Workflows makes sure the program executes reliably, despite
+    hardware and networking interruptions.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "workflows.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the gRPC channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "workflows.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service.
These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_workflows( + self, + ) -> Callable[[workflows.ListWorkflowsRequest], workflows.ListWorkflowsResponse]: + r"""Return a callable for the list workflows method over gRPC. + + Lists Workflows in a given project and location. + The default order is not specified. + + Returns: + Callable[[~.ListWorkflowsRequest], + ~.ListWorkflowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workflows" not in self._stubs: + self._stubs["list_workflows"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.v1.Workflows/ListWorkflows", + request_serializer=workflows.ListWorkflowsRequest.serialize, + response_deserializer=workflows.ListWorkflowsResponse.deserialize, + ) + return self._stubs["list_workflows"] + + @property + def get_workflow( + self, + ) -> Callable[[workflows.GetWorkflowRequest], workflows.Workflow]: + r"""Return a callable for the get workflow method over gRPC. + + Gets details of a single Workflow. + + Returns: + Callable[[~.GetWorkflowRequest], + ~.Workflow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workflow" not in self._stubs: + self._stubs["get_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.v1.Workflows/GetWorkflow", + request_serializer=workflows.GetWorkflowRequest.serialize, + response_deserializer=workflows.Workflow.deserialize, + ) + return self._stubs["get_workflow"] + + @property + def create_workflow( + self, + ) -> Callable[[workflows.CreateWorkflowRequest], operations.Operation]: + r"""Return a callable for the create workflow method over gRPC. + + Creates a new workflow. 
If a workflow with the specified name
+        already exists in the specified project and location, the
+        long-running operation will return an
+        [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error.
+
+        Returns:
+            Callable[[~.CreateWorkflowRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_workflow" not in self._stubs:
+            self._stubs["create_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.workflows.v1.Workflows/CreateWorkflow",
+                request_serializer=workflows.CreateWorkflowRequest.serialize,
+                response_deserializer=operations.Operation.FromString,
+            )
+        return self._stubs["create_workflow"]
+
+    @property
+    def delete_workflow(
+        self,
+    ) -> Callable[[workflows.DeleteWorkflowRequest], operations.Operation]:
+        r"""Return a callable for the delete workflow method over gRPC.
+
+        Deletes a workflow with the specified name.
+        This method also cancels and deletes all running
+        executions of the workflow.
+
+        Returns:
+            Callable[[~.DeleteWorkflowRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_workflow" not in self._stubs:
+            self._stubs["delete_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.workflows.v1.Workflows/DeleteWorkflow",
+                request_serializer=workflows.DeleteWorkflowRequest.serialize,
+                response_deserializer=operations.Operation.FromString,
+            )
+        return self._stubs["delete_workflow"]
+
+    @property
+    def update_workflow(
+        self,
+    ) -> Callable[[workflows.UpdateWorkflowRequest], operations.Operation]:
+        r"""Return a callable for the update workflow method over gRPC.
+
+        Updates an existing workflow.
+        Running this method has no impact on already running
+        executions of the workflow. A new revision of the
+        workflow may be created as a result of a successful
+        update operation. In that case, that revision will be
+        used in new workflow executions.
+
+        Returns:
+            Callable[[~.UpdateWorkflowRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_workflow" not in self._stubs:
+            self._stubs["update_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.workflows.v1.Workflows/UpdateWorkflow",
+                request_serializer=workflows.UpdateWorkflowRequest.serialize,
+                response_deserializer=operations.Operation.FromString,
+            )
+        return self._stubs["update_workflow"]
+
+
+__all__ = ("WorkflowsGrpcTransport",)
diff --git a/google/cloud/workflows_v1/services/workflows/transports/grpc_asyncio.py b/google/cloud/workflows_v1/services/workflows/transports/grpc_asyncio.py
new file mode 100644
index 0000000..61c0792
--- /dev/null
+++ b/google/cloud/workflows_v1/services/workflows/transports/grpc_asyncio.py
@@ -0,0 +1,393 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google.api_core import operations_v1  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.workflows_v1.types import workflows
+from google.longrunning import operations_pb2 as operations  # type: ignore
+
+from .base import WorkflowsTransport, DEFAULT_CLIENT_INFO
+from .grpc import WorkflowsGrpcTransport
+
+
+class WorkflowsGrpcAsyncIOTransport(WorkflowsTransport):
+    """gRPC AsyncIO backend transport for Workflows.
+
+    Workflows is used to deploy and execute workflow programs.
+    Workflows makes sure the program executes reliably, despite
+    hardware and networking interruptions.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "workflows.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
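+
+        Example (a minimal sketch; assumes application default credentials
+        are available in the environment)::
+
+            channel = WorkflowsGrpcAsyncIOTransport.create_channel()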
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "workflows.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_workflows( + self, + ) -> Callable[ + [workflows.ListWorkflowsRequest], Awaitable[workflows.ListWorkflowsResponse] + ]: + r"""Return a callable for the list workflows method over gRPC. + + Lists Workflows in a given project and location. + The default order is not specified. + + Returns: + Callable[[~.ListWorkflowsRequest], + Awaitable[~.ListWorkflowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_workflows" not in self._stubs: + self._stubs["list_workflows"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.v1.Workflows/ListWorkflows", + request_serializer=workflows.ListWorkflowsRequest.serialize, + response_deserializer=workflows.ListWorkflowsResponse.deserialize, + ) + return self._stubs["list_workflows"] + + @property + def get_workflow( + self, + ) -> Callable[[workflows.GetWorkflowRequest], Awaitable[workflows.Workflow]]: + r"""Return a callable for the get workflow method over gRPC. + + Gets details of a single Workflow. + + Returns: + Callable[[~.GetWorkflowRequest], + Awaitable[~.Workflow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workflow" not in self._stubs: + self._stubs["get_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.v1.Workflows/GetWorkflow", + request_serializer=workflows.GetWorkflowRequest.serialize, + response_deserializer=workflows.Workflow.deserialize, + ) + return self._stubs["get_workflow"] + + @property + def create_workflow( + self, + ) -> Callable[[workflows.CreateWorkflowRequest], Awaitable[operations.Operation]]: + r"""Return a callable for the create workflow method over gRPC. + + Creates a new workflow. If a workflow with the specified name + already exists in the specified project and location, the long + running operation will return + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error. + + Returns: + Callable[[~.CreateWorkflowRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workflow" not in self._stubs: + self._stubs["create_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.v1.Workflows/CreateWorkflow", + request_serializer=workflows.CreateWorkflowRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["create_workflow"] + + @property + def delete_workflow( + self, + ) -> Callable[[workflows.DeleteWorkflowRequest], Awaitable[operations.Operation]]: + r"""Return a callable for the delete workflow method over gRPC. + + Deletes a workflow with the specified name. + This method also cancels and deletes all running + executions of the workflow. + + Returns: + Callable[[~.DeleteWorkflowRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workflow" not in self._stubs: + self._stubs["delete_workflow"] = self.grpc_channel.unary_unary( + "/google.cloud.workflows.v1.Workflows/DeleteWorkflow", + request_serializer=workflows.DeleteWorkflowRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["delete_workflow"] + + @property + def update_workflow( + self, + ) -> Callable[[workflows.UpdateWorkflowRequest], Awaitable[operations.Operation]]: + r"""Return a callable for the update workflow method over gRPC. 
+
+        Updates an existing workflow.
+        Running this method has no impact on already running
+        executions of the workflow. A new revision of the
+        workflow may be created as a result of a successful
+        update operation. In that case, that revision will be
+        used in new workflow executions.
+
+        Returns:
+            Callable[[~.UpdateWorkflowRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_workflow" not in self._stubs:
+            self._stubs["update_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.workflows.v1.Workflows/UpdateWorkflow",
+                request_serializer=workflows.UpdateWorkflowRequest.serialize,
+                response_deserializer=operations.Operation.FromString,
+            )
+        return self._stubs["update_workflow"]
+
+
+__all__ = ("WorkflowsGrpcAsyncIOTransport",)
diff --git a/google/cloud/workflows_v1/types/__init__.py b/google/cloud/workflows_v1/types/__init__.py
new file mode 100644
index 0000000..b3730e6
--- /dev/null
+++ b/google/cloud/workflows_v1/types/__init__.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .workflows import (
+    CreateWorkflowRequest,
+    DeleteWorkflowRequest,
+    GetWorkflowRequest,
+    ListWorkflowsRequest,
+    ListWorkflowsResponse,
+    OperationMetadata,
+    UpdateWorkflowRequest,
+    Workflow,
+)
+
+__all__ = (
+    "CreateWorkflowRequest",
+    "DeleteWorkflowRequest",
+    "GetWorkflowRequest",
+    "ListWorkflowsRequest",
+    "ListWorkflowsResponse",
+    "OperationMetadata",
+    "UpdateWorkflowRequest",
+    "Workflow",
+)
diff --git a/google/cloud/workflows_v1/types/workflows.py b/google/cloud/workflows_v1/types/workflows.py
new file mode 100644
index 0000000..26fd9e4
--- /dev/null
+++ b/google/cloud/workflows_v1/types/workflows.py
@@ -0,0 +1,314 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import proto  # type: ignore
+
+
+from google.protobuf import field_mask_pb2 as field_mask  # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.cloud.workflows.v1",
+    manifest={
+        "Workflow",
+        "ListWorkflowsRequest",
+        "ListWorkflowsResponse",
+        "GetWorkflowRequest",
+        "CreateWorkflowRequest",
+        "DeleteWorkflowRequest",
+        "UpdateWorkflowRequest",
+        "OperationMetadata",
+    },
+)
+
+
+class Workflow(proto.Message):
+    r"""Workflow program to be executed by Workflows.
+
+    Attributes:
+        name (str):
+            The resource name of the workflow.
+            Format:
+            projects/{project}/locations/{location}/workflows/{workflow}
+        description (str):
+            Description of the workflow provided by the
+            user. Must be at most 1000 Unicode characters
+            long.
+        state (google.cloud.workflows_v1.types.Workflow.State):
+            Output only. State of the workflow
+            deployment.
+        revision_id (str):
+            Output only. The revision of the workflow. A new revision of
+            a workflow is created as a result of updating the following
+            fields of a workflow:
+
+            -  ``source_code``
+            -  ``service_account``
+
+            The format is "000001-a4d", where the first 6 characters
+            define the zero-padded revision ordinal number. They are
+            followed by a hyphen and 3 hexadecimal random characters.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp of when the
+            workflow was created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The last update timestamp of the
+            workflow.
+        revision_create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp when the latest
+            revision of the workflow was created.
+        labels (Sequence[google.cloud.workflows_v1.types.Workflow.LabelsEntry]):
+            Labels associated with this workflow.
+            Labels can contain at most 64 entries. Keys and
+            values can be no longer than 63 characters and
+            can only contain lowercase letters, numeric
+            characters, underscores and dashes. Label keys
+            must start with a letter. International
+            characters are allowed.
+        service_account (str):
+            Name of the service account associated with the latest
+            workflow version. This service account represents the
+            identity of the workflow and determines what permissions the
+            workflow has. Format:
+            projects/{project}/serviceAccounts/{account}
+
+            Using ``-`` as a wildcard for the ``{project}`` will infer
+            the project from the account. The ``{account}`` value can be
+            the ``email`` address or the ``unique_id`` of the service
+            account.
+
+            If not provided, the workflow will use the project's default
+            service account. Modifying this field for an existing
+            workflow results in a new workflow revision.
+        source_contents (str):
+            Workflow code to be executed. The size limit
+            is 32KB.
+    """
+
+    class State(proto.Enum):
+        r"""Describes the current state of workflow deployment. More
+        states may be added in the future.
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + + name = proto.Field(proto.STRING, number=1) + + description = proto.Field(proto.STRING, number=2) + + state = proto.Field(proto.ENUM, number=3, enum=State,) + + revision_id = proto.Field(proto.STRING, number=4) + + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + revision_create_time = proto.Field( + proto.MESSAGE, number=7, message=timestamp.Timestamp, + ) + + labels = proto.MapField(proto.STRING, proto.STRING, number=8) + + service_account = proto.Field(proto.STRING, number=9) + + source_contents = proto.Field(proto.STRING, number=10, oneof="source_code") + + +class ListWorkflowsRequest(proto.Message): + r"""Request for the + [ListWorkflows][google.cloud.workflows.v1.Workflows.ListWorkflows] + method. + + Attributes: + parent (str): + Required. Project and location from which the + workflows should be listed. Format: + projects/{project}/locations/{location} + page_size (int): + Maximum number of workflows to return per + call. The service may return fewer than this + value. If the value is not specified, a default + value of 500 will be used. The maximum permitted + value is 1000 and values greater than 1000 will + be coerced down to 1000. + page_token (str): + A page token, received from a previous ``ListWorkflows`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListWorkflows`` must match the call that provided the page + token. + filter (str): + Filter to restrict results to specific + workflows. + order_by (str): + Comma-separated list of fields that that + specify the order of the results. Default + sorting order for a field is ascending. To + specify descending order for a field, append a " + desc" suffix. + If not specified, the results will be returned + in an unspecified order. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + filter = proto.Field(proto.STRING, number=4) + + order_by = proto.Field(proto.STRING, number=5) + + +class ListWorkflowsResponse(proto.Message): + r"""Response for the + [ListWorkflows][google.cloud.workflows.v1.Workflows.ListWorkflows] + method. + + Attributes: + workflows (Sequence[google.cloud.workflows_v1.types.Workflow]): + The workflows which match the request. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (Sequence[str]): + Unreachable resources. + """ + + @property + def raw_page(self): + return self + + workflows = proto.RepeatedField(proto.MESSAGE, number=1, message="Workflow",) + + next_page_token = proto.Field(proto.STRING, number=2) + + unreachable = proto.RepeatedField(proto.STRING, number=3) + + +class GetWorkflowRequest(proto.Message): + r"""Request for the + [GetWorkflow][google.cloud.workflows.v1.Workflows.GetWorkflow] + method. + + Attributes: + name (str): + Required. Name of the workflow which + information should be retrieved. Format: + projects/{project}/locations/{location}/workflows/{workflow} + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateWorkflowRequest(proto.Message): + r"""Request for the + [CreateWorkflow][google.cloud.workflows.v1.Workflows.CreateWorkflow] + method. + + Attributes: + parent (str): + Required. 
Project and location in which the + workflow should be created. Format: + projects/{project}/locations/{location} + workflow (google.cloud.workflows_v1.types.Workflow): + Required. Workflow to be created. + workflow_id (str): + Required. The ID of the workflow to be created. It has to + fulfill the following requirements: + + - Must contain only letters, numbers, underscores and + hyphens. + - Must start with a letter. + - Must be between 1-64 characters. + - Must end with a number or a letter. + - Must be unique within the customer project and location. + """ + + parent = proto.Field(proto.STRING, number=1) + + workflow = proto.Field(proto.MESSAGE, number=2, message="Workflow",) + + workflow_id = proto.Field(proto.STRING, number=3) + + +class DeleteWorkflowRequest(proto.Message): + r"""Request for the + [DeleteWorkflow][google.cloud.workflows.v1.Workflows.DeleteWorkflow] + method. + + Attributes: + name (str): + Required. Name of the workflow to be deleted. + Format: + projects/{project}/locations/{location}/workflows/{workflow} + """ + + name = proto.Field(proto.STRING, number=1) + + +class UpdateWorkflowRequest(proto.Message): + r"""Request for the + [UpdateWorkflow][google.cloud.workflows.v1.Workflows.UpdateWorkflow] + method. + + Attributes: + workflow (google.cloud.workflows_v1.types.Workflow): + Required. Workflow to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + List of fields to be updated. If not present, + the entire workflow will be updated. + """ + + workflow = proto.Field(proto.MESSAGE, number=1, message="Workflow",) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation was created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time the operation finished running. + target (str): + Server-defined resource path for the target + of the operation. + verb (str): + Name of the verb executed by the operation. + api_version (str): + API version used to start the operation. + """ + + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + target = proto.Field(proto.STRING, number=3) + + verb = proto.Field(proto.STRING, number=4) + + api_version = proto.Field(proto.STRING, number=5) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/workflows_v1beta/services/workflows/async_client.py b/google/cloud/workflows_v1beta/services/workflows/async_client.py index 8f86b15..ba757c5 100644 --- a/google/cloud/workflows_v1beta/services/workflows/async_client.py +++ b/google/cloud/workflows_v1beta/services/workflows/async_client.py @@ -380,7 +380,6 @@ async def create_workflow( - Must be unique within the customer project and location. - This corresponds to the ``workflow_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/workflows_v1beta/services/workflows/client.py b/google/cloud/workflows_v1beta/services/workflows/client.py index f4bf900..4610a56 100644 --- a/google/cloud/workflows_v1beta/services/workflows/client.py +++ b/google/cloud/workflows_v1beta/services/workflows/client.py @@ -560,7 +560,6 @@ def create_workflow( - Must be unique within the customer project and location. 
- This corresponds to the ``workflow_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/synth.metadata b/synth.metadata index 56b945f..c4a5369 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,29 +4,29 @@ "git": { "name": ".", "remote": "git@github.com:googleapis/python-workflows", - "sha": "229763479721dd5a5d3686b14add4ac7803ec06e" + "sha": "d377593110a3972ed2a00223d495e65c9ea0e55b" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a1af63efb82f54428ab35ea76869d9cd57ca52b8", - "internalRef": "364635275" + "sha": "3f94707cc14766a6d73c9e23f77be2b95992f59b", + "internalRef": "365104345" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "86ed43d4f56e6404d068e62e497029018879c771" + "sha": "bb854b6c048619e3be4e8b8ce8ed10aa74ea78ef" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "86ed43d4f56e6404d068e62e497029018879c771" + "sha": "bb854b6c048619e3be4e8b8ce8ed10aa74ea78ef" } } ], @@ -35,7 +35,16 @@ "client": { "source": "googleapis", "apiName": "workflows_executions", - "apiVersion": "v1beta1", + "apiVersion": "v1beta", + "language": "python", + "generator": "bazel" + } + }, + { + "client": { + "source": "googleapis", + "apiName": "workflows_executions", + "apiVersion": "v1", "language": "python", "generator": "bazel" } @@ -48,6 +57,15 @@ "language": "python", "generator": "bazel" } + }, + { + "client": { + "source": "googleapis", + "apiName": "workflows", + "apiVersion": "v1", + "language": "python", + "generator": "bazel" + } } ] } \ No newline at end of file diff --git a/synth.py b/synth.py index 76545e1..8d17bd6 100644 --- a/synth.py +++ b/synth.py @@ -22,51 +22,50 @@ gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() +# This library ships clients for two different APIs, +# Workflows and Workflows Executions +workflows_versions = ["v1beta", "v1"] +workflows_executions_versions = ["v1beta", "v1"] + # ---------------------------------------------------------------------------- # Generate workflows and workflows executions GAPIC layer # ---------------------------------------------------------------------------- -library = gapic.py_library( - service="workflows_executions", - version="v1beta1", - bazel_target="//google/cloud/workflows/executions/v1beta:workflows-executions-v1beta-py", -) +for version in workflows_executions_versions: + library = gapic.py_library( + service="workflows_executions", + version=version, + bazel_target=f"//google/cloud/workflows/executions/{version}:workflows-executions-{version}-py", + ) -s.move( - library, - excludes=[ - "setup.py", - "README.rst", - "docs/index.rst", - "scripts/fixup_executions_v1beta_keywords.py", - ], -) + s.move( + library, + excludes=[ + "setup.py", + "README.rst", + "docs/index.rst", + f"scripts/fixup_executions_{version}_keywords.py", + ], + ) -# move workflows after executions, since we want to use "workflows" for the name -library = gapic.py_library( - service="workflows", - version="v1beta", - bazel_target="//google/cloud/workflows/v1beta:workflows-v1beta-py", -) +for version in workflows_versions: + # move workflows after executions, since we want to use "workflows" for the name + library = gapic.py_library( + service="workflows", + version=version, + bazel_target=f"//google/cloud/workflows/{version}:workflows-{version}-py", + ) -s.move( - library, - excludes=[ - "setup.py", - "README.rst", - "docs/index.rst", - 
"scripts/fixup_workflows_v1beta_keywords.py", - ], -) + s.move( + library, + excludes=[ + "setup.py", + "README.rst", + "docs/index.rst", + f"scripts/fixup_workflows_{version}_keywords.py", + ], + ) -# add empty line after list in docstring -s.replace( - "google/**/*.py", - """- Must be unique within the customer project and - location.""", - """- Must be unique within the customer project and - location.\n""", -) # Make sure this library is named 'google-cloud-workflows' everywhere s.replace( diff --git a/tests/unit/gapic/executions_v1/__init__.py b/tests/unit/gapic/executions_v1/__init__.py new file mode 100644 index 0000000..42ffdf2 --- /dev/null +++ b/tests/unit/gapic/executions_v1/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/executions_v1/test_executions.py b/tests/unit/gapic/executions_v1/test_executions.py new file mode 100644 index 0000000..10ca798 --- /dev/null +++ b/tests/unit/gapic/executions_v1/test_executions.py @@ -0,0 +1,2007 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.workflows.executions_v1.services.executions import ( + ExecutionsAsyncClient, +) +from google.cloud.workflows.executions_v1.services.executions import ExecutionsClient +from google.cloud.workflows.executions_v1.services.executions import pagers +from google.cloud.workflows.executions_v1.services.executions import transports +from google.cloud.workflows.executions_v1.types import executions +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ExecutionsClient._get_default_mtls_endpoint(None) is None + assert ( + ExecutionsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + ExecutionsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ExecutionsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ExecutionsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ExecutionsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ExecutionsClient, ExecutionsAsyncClient,]) +def test_executions_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "workflowexecutions.googleapis.com:443" + + +@pytest.mark.parametrize("client_class", [ExecutionsClient, ExecutionsAsyncClient,]) +def test_executions_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "workflowexecutions.googleapis.com:443" + + +def test_executions_client_get_transport_class(): + transport = ExecutionsClient.get_transport_class() + available_transports = [ + transports.ExecutionsGrpcTransport, + ] + assert transport in available_transports + + transport = ExecutionsClient.get_transport_class("grpc") + assert transport == transports.ExecutionsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ExecutionsClient, transports.ExecutionsGrpcTransport, "grpc"), + ( + ExecutionsAsyncClient, + transports.ExecutionsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + ExecutionsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ExecutionsClient) +) +@mock.patch.object( + ExecutionsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ExecutionsAsyncClient), +) +def test_executions_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
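+    # (A ready transport instance is used as-is, so the client never needs
+    # to look up a transport class; ``gtc.assert_not_called()`` below
+    # verifies exactly that.)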
+ with mock.patch.object(ExecutionsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ExecutionsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
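+    # Only the strings "true" and "false" are accepted for this variable;
+    # anything else should make client construction raise ValueError.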
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ExecutionsClient, transports.ExecutionsGrpcTransport, "grpc", "true"), + ( + ExecutionsAsyncClient, + transports.ExecutionsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ExecutionsClient, transports.ExecutionsGrpcTransport, "grpc", "false"), + ( + ExecutionsAsyncClient, + transports.ExecutionsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + ExecutionsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ExecutionsClient) +) +@mock.patch.object( + ExecutionsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ExecutionsAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_executions_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
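+    # No explicit client_cert_source this time: the client should fall back
+    # to the default mTLS hooks in ``google.auth.transport.mtls``, which the
+    # nested mock.patch calls below stand in for.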
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ExecutionsClient, transports.ExecutionsGrpcTransport, "grpc"), + ( + ExecutionsAsyncClient, + transports.ExecutionsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_executions_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ExecutionsClient, transports.ExecutionsGrpcTransport, "grpc"), + ( + ExecutionsAsyncClient, + transports.ExecutionsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_executions_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
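+    # The path is forwarded to the transport untouched; the transport itself
+    # is what later loads it via ``google.auth.load_credentials_from_file``.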
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_executions_client_client_options_from_dict(): + with mock.patch( + "google.cloud.workflows.executions_v1.services.executions.transports.ExecutionsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ExecutionsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_executions( + transport: str = "grpc", request_type=executions.ListExecutionsRequest +): + client = ExecutionsClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = executions.ListExecutionsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_executions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.ListExecutionsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListExecutionsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_executions_from_dict(): + test_list_executions(request_type=dict) + + +def test_list_executions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ExecutionsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + client.list_executions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.ListExecutionsRequest() + + +@pytest.mark.asyncio +async def test_list_executions_async( + transport: str = "grpc_asyncio", request_type=executions.ListExecutionsRequest +): + client = ExecutionsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + executions.ListExecutionsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_executions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.ListExecutionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExecutionsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_executions_async_from_dict(): + await test_list_executions_async(request_type=dict) + + +def test_list_executions_field_headers(): + client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = executions.ListExecutionsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + call.return_value = executions.ListExecutionsResponse() + + client.list_executions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_executions_field_headers_async(): + client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = executions.ListExecutionsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + executions.ListExecutionsResponse() + ) + + await client.list_executions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_executions_flattened(): + client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_executions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = executions.ListExecutionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_executions(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+def test_list_executions_flattened_error():
+    client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_executions(
+            executions.ListExecutionsRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_executions_flattened_async():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            executions.ListExecutionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_executions(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_executions_flattened_error_async():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_executions(
+            executions.ListExecutionsRequest(), parent="parent_value",
+        )
+
+
+def test_list_executions_pager():
+    client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            executions.ListExecutionsResponse(
+                executions=[
+                    executions.Execution(),
+                    executions.Execution(),
+                    executions.Execution(),
+                ],
+                next_page_token="abc",
+            ),
+            executions.ListExecutionsResponse(executions=[], next_page_token="def",),
+            executions.ListExecutionsResponse(
+                executions=[executions.Execution(),], next_page_token="ghi",
+            ),
+            executions.ListExecutionsResponse(
+                executions=[executions.Execution(), executions.Execution(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_executions(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, executions.Execution) for i in results)
+
+
+def test_list_executions_pages():
+    client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_executions), "__call__") as call:
+        # Set the response to a series of pages.
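+        # (Paging contract sketch, grounded in the pages defined below: the
+        # client re-issues the request with each response's next_page_token as
+        # the new page_token, and stops on an empty token, e.g.
+        #
+        #     for page in client.list_executions(request={}).pages:
+        #         token = page.raw_page.next_page_token
+        # )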
+        call.side_effect = (
+            executions.ListExecutionsResponse(
+                executions=[
+                    executions.Execution(),
+                    executions.Execution(),
+                    executions.Execution(),
+                ],
+                next_page_token="abc",
+            ),
+            executions.ListExecutionsResponse(executions=[], next_page_token="def",),
+            executions.ListExecutionsResponse(
+                executions=[executions.Execution(),], next_page_token="ghi",
+            ),
+            executions.ListExecutionsResponse(
+                executions=[executions.Execution(), executions.Execution(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_executions(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_executions_async_pager():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_executions), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            executions.ListExecutionsResponse(
+                executions=[
+                    executions.Execution(),
+                    executions.Execution(),
+                    executions.Execution(),
+                ],
+                next_page_token="abc",
+            ),
+            executions.ListExecutionsResponse(executions=[], next_page_token="def",),
+            executions.ListExecutionsResponse(
+                executions=[executions.Execution(),], next_page_token="ghi",
+            ),
+            executions.ListExecutionsResponse(
+                executions=[executions.Execution(), executions.Execution(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_executions(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, executions.Execution) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_executions_async_pages():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_executions), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            executions.ListExecutionsResponse(
+                executions=[
+                    executions.Execution(),
+                    executions.Execution(),
+                    executions.Execution(),
+                ],
+                next_page_token="abc",
+            ),
+            executions.ListExecutionsResponse(executions=[], next_page_token="def",),
+            executions.ListExecutionsResponse(
+                executions=[executions.Execution(),], next_page_token="ghi",
+            ),
+            executions.ListExecutionsResponse(
+                executions=[executions.Execution(), executions.Execution(),],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_executions(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_create_execution(
+    transport: str = "grpc", request_type=executions.CreateExecutionRequest
+):
+    client = ExecutionsClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = executions.Execution( + name="name_value", + state=executions.Execution.State.ACTIVE, + argument="argument_value", + result="result_value", + workflow_revision_id="workflow_revision_id_value", + ) + + response = client.create_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.CreateExecutionRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, executions.Execution) + + assert response.name == "name_value" + + assert response.state == executions.Execution.State.ACTIVE + + assert response.argument == "argument_value" + + assert response.result == "result_value" + + assert response.workflow_revision_id == "workflow_revision_id_value" + + +def test_create_execution_from_dict(): + test_create_execution(request_type=dict) + + +def test_create_execution_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ExecutionsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + client.create_execution() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.CreateExecutionRequest() + + +@pytest.mark.asyncio +async def test_create_execution_async( + transport: str = "grpc_asyncio", request_type=executions.CreateExecutionRequest +): + client = ExecutionsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + executions.Execution( + name="name_value", + state=executions.Execution.State.ACTIVE, + argument="argument_value", + result="result_value", + workflow_revision_id="workflow_revision_id_value", + ) + ) + + response = await client.create_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.CreateExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, executions.Execution) + + assert response.name == "name_value" + + assert response.state == executions.Execution.State.ACTIVE + + assert response.argument == "argument_value" + + assert response.result == "result_value" + + assert response.workflow_revision_id == "workflow_revision_id_value" + + +@pytest.mark.asyncio +async def test_create_execution_async_from_dict(): + await test_create_execution_async(request_type=dict) + + +def test_create_execution_field_headers(): + client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
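+    # (The expected metadata entry is built with the same helper the pager test
+    # above uses, roughly
+    #
+    #     gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),))
+    #
+    # producing ("x-goog-request-params", "parent=parent/value"); illustrative only.)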
+    request = executions.CreateExecutionRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
+        call.return_value = executions.Execution()
+
+        client.create_execution(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_execution_field_headers_async():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = executions.CreateExecutionRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            executions.Execution()
+        )
+
+        await client.create_execution(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_execution_flattened():
+    client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = executions.Execution()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_execution(
+            parent="parent_value", execution=executions.Execution(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+        assert args[0].execution == executions.Execution(name="name_value")
+
+
+def test_create_execution_flattened_error():
+    client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_execution(
+            executions.CreateExecutionRequest(),
+            parent="parent_value",
+            execution=executions.Execution(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_execution_flattened_async():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_execution), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            executions.Execution()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.create_execution( + parent="parent_value", execution=executions.Execution(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].execution == executions.Execution(name="name_value") + + +@pytest.mark.asyncio +async def test_create_execution_flattened_error_async(): + client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_execution( + executions.CreateExecutionRequest(), + parent="parent_value", + execution=executions.Execution(name="name_value"), + ) + + +def test_get_execution( + transport: str = "grpc", request_type=executions.GetExecutionRequest +): + client = ExecutionsClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = executions.Execution( + name="name_value", + state=executions.Execution.State.ACTIVE, + argument="argument_value", + result="result_value", + workflow_revision_id="workflow_revision_id_value", + ) + + response = client.get_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.GetExecutionRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, executions.Execution) + + assert response.name == "name_value" + + assert response.state == executions.Execution.State.ACTIVE + + assert response.argument == "argument_value" + + assert response.result == "result_value" + + assert response.workflow_revision_id == "workflow_revision_id_value" + + +def test_get_execution_from_dict(): + test_get_execution(request_type=dict) + + +def test_get_execution_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ExecutionsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + client.get_execution() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.GetExecutionRequest() + + +@pytest.mark.asyncio +async def test_get_execution_async( + transport: str = "grpc_asyncio", request_type=executions.GetExecutionRequest +): + client = ExecutionsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + executions.Execution( + name="name_value", + state=executions.Execution.State.ACTIVE, + argument="argument_value", + result="result_value", + workflow_revision_id="workflow_revision_id_value", + ) + ) + + response = await client.get_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.GetExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, executions.Execution) + + assert response.name == "name_value" + + assert response.state == executions.Execution.State.ACTIVE + + assert response.argument == "argument_value" + + assert response.result == "result_value" + + assert response.workflow_revision_id == "workflow_revision_id_value" + + +@pytest.mark.asyncio +async def test_get_execution_async_from_dict(): + await test_get_execution_async(request_type=dict) + + +def test_get_execution_field_headers(): + client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = executions.GetExecutionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + call.return_value = executions.Execution() + + client.get_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_execution_field_headers_async(): + client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = executions.GetExecutionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + executions.Execution() + ) + + await client.get_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_execution_flattened(): + client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = executions.Execution() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
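+        # (Equivalence sketch: the flattened keyword form below is expected to
+        # be coalesced into a request object, i.e. roughly the same call as
+        #
+        #     client.get_execution(executions.GetExecutionRequest(name="name_value"))
+        #
+        # while passing both forms at once raises ValueError, as the
+        # *_flattened_error tests verify.)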
+        client.get_execution(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].name == "name_value"
+
+
+def test_get_execution_flattened_error():
+    client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_execution(
+            executions.GetExecutionRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_execution_flattened_async():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_execution), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            executions.Execution()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_execution(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_execution_flattened_error_async():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_execution(
+            executions.GetExecutionRequest(), name="name_value",
+        )
+
+
+def test_cancel_execution(
+    transport: str = "grpc", request_type=executions.CancelExecutionRequest
+):
+    client = ExecutionsClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = executions.Execution(
+            name="name_value",
+            state=executions.Execution.State.ACTIVE,
+            argument="argument_value",
+            result="result_value",
+            workflow_revision_id="workflow_revision_id_value",
+        )
+
+        response = client.cancel_execution(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == executions.CancelExecutionRequest()
+
+    # Establish that the response is the type that we expect.
+
+    assert isinstance(response, executions.Execution)
+
+    assert response.name == "name_value"
+
+    assert response.state == executions.Execution.State.ACTIVE
+
+    assert response.argument == "argument_value"
+
+    assert response.result == "result_value"
+
+    assert response.workflow_revision_id == "workflow_revision_id_value"
+
+
+def test_cancel_execution_from_dict():
+    test_cancel_execution(request_type=dict)
+
+
+def test_cancel_execution_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e.
request == None and no flattened fields passed, work. + client = ExecutionsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + client.cancel_execution() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.CancelExecutionRequest() + + +@pytest.mark.asyncio +async def test_cancel_execution_async( + transport: str = "grpc_asyncio", request_type=executions.CancelExecutionRequest +): + client = ExecutionsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + executions.Execution( + name="name_value", + state=executions.Execution.State.ACTIVE, + argument="argument_value", + result="result_value", + workflow_revision_id="workflow_revision_id_value", + ) + ) + + response = await client.cancel_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == executions.CancelExecutionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, executions.Execution) + + assert response.name == "name_value" + + assert response.state == executions.Execution.State.ACTIVE + + assert response.argument == "argument_value" + + assert response.result == "result_value" + + assert response.workflow_revision_id == "workflow_revision_id_value" + + +@pytest.mark.asyncio +async def test_cancel_execution_async_from_dict(): + await test_cancel_execution_async(request_type=dict) + + +def test_cancel_execution_field_headers(): + client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = executions.CancelExecutionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call: + call.return_value = executions.Execution() + + client.cancel_execution(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_execution_field_headers_async(): + client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = executions.CancelExecutionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            executions.Execution()
+        )
+
+        await client.cancel_execution(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_cancel_execution_flattened():
+    client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = executions.Execution()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.cancel_execution(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].name == "name_value"
+
+
+def test_cancel_execution_flattened_error():
+    client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.cancel_execution(
+            executions.CancelExecutionRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_cancel_execution_flattened_async():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_execution), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            executions.Execution()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.cancel_execution(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_cancel_execution_flattened_error_async():
+    client = ExecutionsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.cancel_execution(
+            executions.CancelExecutionRequest(), name="name_value",
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.ExecutionsGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = ExecutionsClient(
+            credentials=credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+ transport = transports.ExecutionsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ExecutionsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ExecutionsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ExecutionsClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ExecutionsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = ExecutionsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ExecutionsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ExecutionsGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [transports.ExecutionsGrpcTransport, transports.ExecutionsGrpcAsyncIOTransport,], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ExecutionsClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.ExecutionsGrpcTransport,) + + +def test_executions_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.ExecutionsTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_executions_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.workflows.executions_v1.services.executions.transports.ExecutionsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ExecutionsTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
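+    # (Sketch of the abstract base being asserted on: each method is generated
+    # as a stub along the lines of
+    #
+    #     def list_executions(self, request):
+    #         raise NotImplementedError()
+    #
+    # and only the concrete gRPC transports override it; illustrative only.)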
+ methods = ( + "list_executions", + "create_execution", + "get_execution", + "cancel_execution", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_executions_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.workflows.executions_v1.services.executions.transports.ExecutionsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.ExecutionsTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_executions_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.workflows.executions_v1.services.executions.transports.ExecutionsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.ExecutionsTransport() + adc.assert_called_once() + + +def test_executions_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + ExecutionsClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_executions_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.ExecutionsGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.ExecutionsGrpcTransport, transports.ExecutionsGrpcAsyncIOTransport], +) +def test_executions_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
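+    # (Expected conversion, using the callback defined at the top of this file:
+    #
+    #     cert, key = client_cert_source_callback()
+    #     ssl_creds = grpc.ssl_channel_credentials(
+    #         certificate_chain=cert, private_key=key
+    #     )
+    #
+    # which is exactly what the mock below asserts was called.)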
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_executions_host_no_port():
+    client = ExecutionsClient(
+        credentials=credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="workflowexecutions.googleapis.com"
+        ),
+    )
+    assert client.transport._host == "workflowexecutions.googleapis.com:443"
+
+
+def test_executions_host_with_port():
+    client = ExecutionsClient(
+        credentials=credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="workflowexecutions.googleapis.com:8000"
+        ),
+    )
+    assert client.transport._host == "workflowexecutions.googleapis.com:8000"
+
+
+def test_executions_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ExecutionsGrpcTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_executions_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ExecutionsGrpcAsyncIOTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.ExecutionsGrpcTransport, transports.ExecutionsGrpcAsyncIOTransport], +) +def test_executions_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.ExecutionsGrpcTransport, transports.ExecutionsGrpcAsyncIOTransport], +) +def test_executions_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_execution_path(): + project = "squid" + location = "clam" + workflow = "whelk" + execution = "octopus" + + expected = "projects/{project}/locations/{location}/workflows/{workflow}/executions/{execution}".format( + project=project, location=location, workflow=workflow, execution=execution, + ) + actual = ExecutionsClient.execution_path(project, location, workflow, execution) + assert expected == actual + + +def test_parse_execution_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "workflow": "cuttlefish", + "execution": "mussel", + } + path = ExecutionsClient.execution_path(**expected) + + # Check that the 
path construction is reversible. + actual = ExecutionsClient.parse_execution_path(path) + assert expected == actual + + +def test_workflow_path(): + project = "winkle" + location = "nautilus" + workflow = "scallop" + + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, location=location, workflow=workflow, + ) + actual = ExecutionsClient.workflow_path(project, location, workflow) + assert expected == actual + + +def test_parse_workflow_path(): + expected = { + "project": "abalone", + "location": "squid", + "workflow": "clam", + } + path = ExecutionsClient.workflow_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_workflow_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ExecutionsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ExecutionsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + + expected = "folders/{folder}".format(folder=folder,) + actual = ExecutionsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ExecutionsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + + expected = "organizations/{organization}".format(organization=organization,) + actual = ExecutionsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ExecutionsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + + expected = "projects/{project}".format(project=project,) + actual = ExecutionsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ExecutionsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ExecutionsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = ExecutionsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ExecutionsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
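+    # (Round trip grounded in this test's values:
+    #     common_location_path("squid", "clam") -> "projects/squid/locations/clam"
+    # and parse_common_location_path maps that string back to the dict above.)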
+ actual = ExecutionsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ExecutionsTransport, "_prep_wrapped_messages" + ) as prep: + client = ExecutionsClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ExecutionsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ExecutionsClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/workflows_v1/__init__.py b/tests/unit/gapic/workflows_v1/__init__.py new file mode 100644 index 0000000..42ffdf2 --- /dev/null +++ b/tests/unit/gapic/workflows_v1/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/workflows_v1/test_workflows.py b/tests/unit/gapic/workflows_v1/test_workflows.py new file mode 100644 index 0000000..671b713 --- /dev/null +++ b/tests/unit/gapic/workflows_v1/test_workflows.py @@ -0,0 +1,2179 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.workflows_v1.services.workflows import WorkflowsAsyncClient +from google.cloud.workflows_v1.services.workflows import WorkflowsClient +from google.cloud.workflows_v1.services.workflows import pagers +from google.cloud.workflows_v1.services.workflows import transports +from google.cloud.workflows_v1.types import workflows +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert WorkflowsClient._get_default_mtls_endpoint(None) is None + assert WorkflowsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + WorkflowsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + WorkflowsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + WorkflowsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert WorkflowsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [WorkflowsClient, WorkflowsAsyncClient,]) +def test_workflows_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "workflows.googleapis.com:443" + + +@pytest.mark.parametrize("client_class", [WorkflowsClient, WorkflowsAsyncClient,]) +def test_workflows_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert 
isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "workflows.googleapis.com:443" + + +def test_workflows_client_get_transport_class(): + transport = WorkflowsClient.get_transport_class() + available_transports = [ + transports.WorkflowsGrpcTransport, + ] + assert transport in available_transports + + transport = WorkflowsClient.get_transport_class("grpc") + assert transport == transports.WorkflowsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (WorkflowsClient, transports.WorkflowsGrpcTransport, "grpc"), + ( + WorkflowsAsyncClient, + transports.WorkflowsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + WorkflowsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkflowsClient) +) +@mock.patch.object( + WorkflowsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkflowsAsyncClient), +) +def test_workflows_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(WorkflowsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(WorkflowsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
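+    # Only "never", "auto" and "always" are recognized values; anything else
+    # should fail fast with MutualTLSChannelError rather than being ignored.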
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (WorkflowsClient, transports.WorkflowsGrpcTransport, "grpc", "true"), + ( + WorkflowsAsyncClient, + transports.WorkflowsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (WorkflowsClient, transports.WorkflowsGrpcTransport, "grpc", "false"), + ( + WorkflowsAsyncClient, + transports.WorkflowsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + WorkflowsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkflowsClient) +) +@mock.patch.object( + WorkflowsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkflowsAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_workflows_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
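+    # Here no cert source is passed via client_options; instead it should be
+    # discovered through the application default (ADC) mTLS hooks mocked below.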
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (WorkflowsClient, transports.WorkflowsGrpcTransport, "grpc"), + ( + WorkflowsAsyncClient, + transports.WorkflowsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_workflows_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (WorkflowsClient, transports.WorkflowsGrpcTransport, "grpc"), + ( + WorkflowsAsyncClient, + transports.WorkflowsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_workflows_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
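+    # The file is not opened here; the path should simply be forwarded to the
+    # transport as credentials_file.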
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_workflows_client_client_options_from_dict(): + with mock.patch( + "google.cloud.workflows_v1.services.workflows.transports.WorkflowsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = WorkflowsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_workflows( + transport: str = "grpc", request_type=workflows.ListWorkflowsRequest +): + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = workflows.ListWorkflowsResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + + response = client.list_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.ListWorkflowsRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListWorkflowsPager) + + assert response.next_page_token == "next_page_token_value" + + assert response.unreachable == ["unreachable_value"] + + +def test_list_workflows_from_dict(): + test_list_workflows(request_type=dict) + + +def test_list_workflows_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: + client.list_workflows() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.ListWorkflowsRequest() + + +@pytest.mark.asyncio +async def test_list_workflows_async( + transport: str = "grpc_asyncio", request_type=workflows.ListWorkflowsRequest +): + client = WorkflowsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: + # Designate an appropriate return value for the call. 
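+        # Wrapping the response in FakeUnaryUnaryCall makes the mocked RPC
+        # awaitable, mimicking a real async gRPC call.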
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflows.ListWorkflowsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + + response = await client.list_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.ListWorkflowsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkflowsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_workflows_async_from_dict(): + await test_list_workflows_async(request_type=dict) + + +def test_list_workflows_field_headers(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.ListWorkflowsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: + call.return_value = workflows.ListWorkflowsResponse() + + client.list_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_workflows_field_headers_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.ListWorkflowsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflows.ListWorkflowsResponse() + ) + + await client.list_workflows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_workflows_flattened(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_workflows), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = workflows.ListWorkflowsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_workflows(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
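+        # The flattened "parent" keyword should have been packed into the
+        # request message by the client.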
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+def test_list_workflows_flattened_error():
+    client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_workflows(
+            workflows.ListWorkflowsRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_workflows_flattened_async():
+    client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_workflows), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = workflows.ListWorkflowsResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workflows.ListWorkflowsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_workflows(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_workflows_flattened_error_async():
+    client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_workflows(
+            workflows.ListWorkflowsRequest(), parent="parent_value",
+        )
+
+
+def test_list_workflows_pager():
+    client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_workflows), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workflows.ListWorkflowsResponse(
+                workflows=[
+                    workflows.Workflow(),
+                    workflows.Workflow(),
+                    workflows.Workflow(),
+                ],
+                next_page_token="abc",
+            ),
+            workflows.ListWorkflowsResponse(workflows=[], next_page_token="def",),
+            workflows.ListWorkflowsResponse(
+                workflows=[workflows.Workflow(),], next_page_token="ghi",
+            ),
+            workflows.ListWorkflowsResponse(
+                workflows=[workflows.Workflow(), workflows.Workflow(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_workflows(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, workflows.Workflow) for i in results)
+
+
+def test_list_workflows_pages():
+    client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_workflows), "__call__") as call:
+        # Set the response to a series of pages.
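+        # Four pages (3, 0, 1 and 2 items); the trailing RuntimeError fails the
+        # test if the pager tries to fetch past the final page.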
+        call.side_effect = (
+            workflows.ListWorkflowsResponse(
+                workflows=[
+                    workflows.Workflow(),
+                    workflows.Workflow(),
+                    workflows.Workflow(),
+                ],
+                next_page_token="abc",
+            ),
+            workflows.ListWorkflowsResponse(workflows=[], next_page_token="def",),
+            workflows.ListWorkflowsResponse(
+                workflows=[workflows.Workflow(),], next_page_token="ghi",
+            ),
+            workflows.ListWorkflowsResponse(
+                workflows=[workflows.Workflow(), workflows.Workflow(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_workflows(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_workflows_async_pager():
+    client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflows), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workflows.ListWorkflowsResponse(
+                workflows=[
+                    workflows.Workflow(),
+                    workflows.Workflow(),
+                    workflows.Workflow(),
+                ],
+                next_page_token="abc",
+            ),
+            workflows.ListWorkflowsResponse(workflows=[], next_page_token="def",),
+            workflows.ListWorkflowsResponse(
+                workflows=[workflows.Workflow(),], next_page_token="ghi",
+            ),
+            workflows.ListWorkflowsResponse(
+                workflows=[workflows.Workflow(), workflows.Workflow(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_workflows(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, workflows.Workflow) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_workflows_async_pages():
+    client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflows), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workflows.ListWorkflowsResponse(
+                workflows=[
+                    workflows.Workflow(),
+                    workflows.Workflow(),
+                    workflows.Workflow(),
+                ],
+                next_page_token="abc",
+            ),
+            workflows.ListWorkflowsResponse(workflows=[], next_page_token="def",),
+            workflows.ListWorkflowsResponse(
+                workflows=[workflows.Workflow(),], next_page_token="ghi",
+            ),
+            workflows.ListWorkflowsResponse(
+                workflows=[workflows.Workflow(), workflows.Workflow(),],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_workflows(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_workflow(
+    transport: str = "grpc", request_type=workflows.GetWorkflowRequest
+):
+    client = WorkflowsClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_workflow), "__call__") as call:
+        # Designate an appropriate return value for the call.
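+        # Populate each scalar field so the assertions below can verify that
+        # the values survive the round trip through the client.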
+ call.return_value = workflows.Workflow( + name="name_value", + description="description_value", + state=workflows.Workflow.State.ACTIVE, + revision_id="revision_id_value", + service_account="service_account_value", + source_contents="source_contents_value", + ) + + response = client.get_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.GetWorkflowRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, workflows.Workflow) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.state == workflows.Workflow.State.ACTIVE + + assert response.revision_id == "revision_id_value" + + assert response.service_account == "service_account_value" + + +def test_get_workflow_from_dict(): + test_get_workflow(request_type=dict) + + +def test_get_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: + client.get_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.GetWorkflowRequest() + + +@pytest.mark.asyncio +async def test_get_workflow_async( + transport: str = "grpc_asyncio", request_type=workflows.GetWorkflowRequest +): + client = WorkflowsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflows.Workflow( + name="name_value", + description="description_value", + state=workflows.Workflow.State.ACTIVE, + revision_id="revision_id_value", + service_account="service_account_value", + ) + ) + + response = await client.get_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.GetWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workflows.Workflow) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.state == workflows.Workflow.State.ACTIVE + + assert response.revision_id == "revision_id_value" + + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_get_workflow_async_from_dict(): + await test_get_workflow_async(request_type=dict) + + +def test_get_workflow_field_headers(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
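+    # The client is expected to mirror "name" into the x-goog-request-params
+    # metadata so the backend can route the request by resource name.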
+ request = workflows.GetWorkflowRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: + call.return_value = workflows.Workflow() + + client.get_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_workflow_field_headers_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.GetWorkflowRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflows.Workflow()) + + await client.get_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_workflow_flattened(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = workflows.Workflow() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_workflow(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_workflow_flattened_error(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workflow( + workflows.GetWorkflowRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_workflow_flattened_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = workflows.Workflow() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflows.Workflow()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_workflow(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_workflow_flattened_error_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_workflow( + workflows.GetWorkflowRequest(), name="name_value", + ) + + +def test_create_workflow( + transport: str = "grpc", request_type=workflows.CreateWorkflowRequest +): + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.create_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.CreateWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_workflow_from_dict(): + test_create_workflow(request_type=dict) + + +def test_create_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: + client.create_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.CreateWorkflowRequest() + + +@pytest.mark.asyncio +async def test_create_workflow_async( + transport: str = "grpc_asyncio", request_type=workflows.CreateWorkflowRequest +): + client = WorkflowsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.create_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.CreateWorkflowRequest() + + # Establish that the response is the type that we expect. 
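+    # CreateWorkflow is a long-running operation, so the client should return
+    # a future wrapping the Operation rather than the raw proto.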
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_workflow_async_from_dict(): + await test_create_workflow_async(request_type=dict) + + +def test_create_workflow_field_headers(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.CreateWorkflowRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.create_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workflow_field_headers_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.CreateWorkflowRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.create_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_workflow_flattened(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_workflow( + parent="parent_value", + workflow=workflows.Workflow(name="name_value"), + workflow_id="workflow_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].workflow == workflows.Workflow(name="name_value") + + assert args[0].workflow_id == "workflow_id_value" + + +def test_create_workflow_flattened_error(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
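+    # Supplying both is ambiguous, so the client refuses with ValueError
+    # instead of guessing which one to send.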
+ with pytest.raises(ValueError): + client.create_workflow( + workflows.CreateWorkflowRequest(), + parent="parent_value", + workflow=workflows.Workflow(name="name_value"), + workflow_id="workflow_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_workflow_flattened_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_workflow( + parent="parent_value", + workflow=workflows.Workflow(name="name_value"), + workflow_id="workflow_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].workflow == workflows.Workflow(name="name_value") + + assert args[0].workflow_id == "workflow_id_value" + + +@pytest.mark.asyncio +async def test_create_workflow_flattened_error_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_workflow( + workflows.CreateWorkflowRequest(), + parent="parent_value", + workflow=workflows.Workflow(name="name_value"), + workflow_id="workflow_id_value", + ) + + +def test_delete_workflow( + transport: str = "grpc", request_type=workflows.DeleteWorkflowRequest +): + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.delete_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.DeleteWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_workflow_from_dict(): + test_delete_workflow(request_type=dict) + + +def test_delete_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
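+    # Even with no arguments, the client should construct and send a default
+    # DeleteWorkflowRequest.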
+ with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: + client.delete_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.DeleteWorkflowRequest() + + +@pytest.mark.asyncio +async def test_delete_workflow_async( + transport: str = "grpc_asyncio", request_type=workflows.DeleteWorkflowRequest +): + client = WorkflowsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.delete_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.DeleteWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_workflow_async_from_dict(): + await test_delete_workflow_async(request_type=dict) + + +def test_delete_workflow_field_headers(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.DeleteWorkflowRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.delete_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workflow_field_headers_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.DeleteWorkflowRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.delete_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_workflow_flattened(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_workflow(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_workflow_flattened_error(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workflow( + workflows.DeleteWorkflowRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_workflow_flattened_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_workflow(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_workflow_flattened_error_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_workflow( + workflows.DeleteWorkflowRequest(), name="name_value", + ) + + +def test_update_workflow( + transport: str = "grpc", request_type=workflows.UpdateWorkflowRequest +): + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.update_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.UpdateWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_workflow_from_dict(): + test_update_workflow(request_type=dict) + + +def test_update_workflow_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: + client.update_workflow() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.UpdateWorkflowRequest() + + +@pytest.mark.asyncio +async def test_update_workflow_async( + transport: str = "grpc_asyncio", request_type=workflows.UpdateWorkflowRequest +): + client = WorkflowsAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.update_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == workflows.UpdateWorkflowRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_workflow_async_from_dict(): + await test_update_workflow_async(request_type=dict) + + +def test_update_workflow_field_headers(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.UpdateWorkflowRequest() + request.workflow.name = "workflow.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.update_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "workflow.name=workflow.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_workflow_field_headers_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflows.UpdateWorkflowRequest() + request.workflow.name = "workflow.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.update_workflow(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
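+    # For UpdateWorkflow the routing parameter is the nested field
+    # "workflow.name" rather than a top-level request field.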
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "workflow.name=workflow.name/value",) in kw[ + "metadata" + ] + + +def test_update_workflow_flattened(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_workflow( + workflow=workflows.Workflow(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].workflow == workflows.Workflow(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_workflow_flattened_error(): + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_workflow( + workflows.UpdateWorkflowRequest(), + workflow=workflows.Workflow(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_workflow_flattened_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_workflow), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_workflow( + workflow=workflows.Workflow(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].workflow == workflows.Workflow(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_workflow_flattened_error_async(): + client = WorkflowsAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_workflow( + workflows.UpdateWorkflowRequest(), + workflow=workflows.Workflow(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
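+    # A pre-built transport already carries its own credentials, so passing
+    # credentials, a credentials file, or scopes alongside it is rejected.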
+ transport = transports.WorkflowsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkflowsClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.WorkflowsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkflowsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.WorkflowsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkflowsClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.WorkflowsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = WorkflowsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.WorkflowsGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.WorkflowsGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [transports.WorkflowsGrpcTransport, transports.WorkflowsGrpcAsyncIOTransport,], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = WorkflowsClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.WorkflowsGrpcTransport,) + + +def test_workflows_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.WorkflowsTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_workflows_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.workflows_v1.services.workflows.transports.WorkflowsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.WorkflowsTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
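+    # The base transport only defines the interface; the concrete gRPC and
+    # gRPC-asyncio transports must override each of these methods.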
+ methods = ( + "list_workflows", + "get_workflow", + "create_workflow", + "delete_workflow", + "update_workflow", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +def test_workflows_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.workflows_v1.services.workflows.transports.WorkflowsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.WorkflowsTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_workflows_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.workflows_v1.services.workflows.transports.WorkflowsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.WorkflowsTransport() + adc.assert_called_once() + + +def test_workflows_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + WorkflowsClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_workflows_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.WorkflowsGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.WorkflowsGrpcTransport, transports.WorkflowsGrpcAsyncIOTransport], +) +def test_workflows_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
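+    # In that case the transport itself is expected to call
+    # grpc.ssl_channel_credentials() with the cert/key bytes from the callback.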
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_workflows_host_no_port():
+    client = WorkflowsClient(
+        credentials=credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="workflows.googleapis.com"
+        ),
+    )
+    assert client.transport._host == "workflows.googleapis.com:443"
+
+
+def test_workflows_host_with_port():
+    client = WorkflowsClient(
+        credentials=credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="workflows.googleapis.com:8000"
+        ),
+    )
+    assert client.transport._host == "workflows.googleapis.com:8000"
+
+
+def test_workflows_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.WorkflowsGrpcTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_workflows_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.WorkflowsGrpcAsyncIOTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
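+# (These arguments were superseded by the ``client_options``-based mTLS
+# configuration; the tests below only guard the deprecated path until it is
+# dropped.)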
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.WorkflowsGrpcTransport, transports.WorkflowsGrpcAsyncIOTransport],
+)
+def test_workflows_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.WorkflowsGrpcTransport, transports.WorkflowsGrpcAsyncIOTransport],
+)
+def test_workflows_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_workflows_grpc_lro_client():
+    client = WorkflowsClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+
+    # Ensure that subsequent calls to the property return the exact same object.
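+    # (The transport caches the client on first access, so every lookup
+    # returns the same instance.)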
+    assert transport.operations_client is transport.operations_client
+
+
+def test_workflows_grpc_lro_async_client():
+    client = WorkflowsAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_workflow_path():
+    project = "squid"
+    location = "clam"
+    workflow = "whelk"
+
+    expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(
+        project=project, location=location, workflow=workflow,
+    )
+    actual = WorkflowsClient.workflow_path(project, location, workflow)
+    assert expected == actual
+
+
+def test_parse_workflow_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "workflow": "nudibranch",
+    }
+    path = WorkflowsClient.workflow_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkflowsClient.parse_workflow_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "cuttlefish"
+
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = WorkflowsClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "mussel",
+    }
+    path = WorkflowsClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkflowsClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "winkle"
+
+    expected = "folders/{folder}".format(folder=folder,)
+    actual = WorkflowsClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "nautilus",
+    }
+    path = WorkflowsClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkflowsClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "scallop"
+
+    expected = "organizations/{organization}".format(organization=organization,)
+    actual = WorkflowsClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "abalone",
+    }
+    path = WorkflowsClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkflowsClient.parse_common_organization_path(path)
+    assert expected == actual
+
+
+def test_common_project_path():
+    project = "squid"
+
+    expected = "projects/{project}".format(project=project,)
+    actual = WorkflowsClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "clam",
+    }
+    path = WorkflowsClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkflowsClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    project = "whelk"
+    location = "octopus"
+
+    expected = "projects/{project}/locations/{location}".format(
+        project=project, location=location,
+    )
+    actual = WorkflowsClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+    }
+    path = WorkflowsClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = WorkflowsClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.WorkflowsTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = WorkflowsClient(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.WorkflowsTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = WorkflowsClient.get_transport_class()
+        transport = transport_class(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
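+
+
+# NOTE: illustrative sketch, not generated code. It ties together the helpers
+# exercised above: building a resource name with ``workflow_path``, parsing it
+# back, and handing the client a pre-built transport. The project, location,
+# and workflow values are made-up placeholders; the leading underscore keeps
+# pytest from collecting this as a test.
+def _example_client_usage():
+    transport = transports.WorkflowsGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    client = WorkflowsClient(transport=transport)
+    assert client.transport is transport
+
+    name = WorkflowsClient.workflow_path("my-project", "us-central1", "my-workflow")
+    assert WorkflowsClient.parse_workflow_path(name) == {
+        "project": "my-project",
+        "location": "us-central1",
+        "workflow": "my-workflow",
+    }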