diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 0000000..fc281c0 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.kokoro/build.sh b/.kokoro/build.sh index b96af36..7640bf3 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-memcache +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-memcache" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,16 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 1118107..b15caf9 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-memcache/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.trampolinerc b/.trampolinerc index 995ee29..383b6ec 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 92e2f10..aacba05 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. 
If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d1..e783f4c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf22..bcd37bb 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/docs/index.rst b/docs/index.rst index 221e3bf..56540c0 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,14 +2,27 @@ .. include:: multiprocessing.rst -API Reference -------------- +This package includes clients for multiple versions of the Memcache +API. By default, you will get ``v1``, the latest GA version. + +memcache_v1 API Reference +------------------------- + +.. toctree:: + :maxdepth: 2 + + Client (v1) + Types (v1) + + +memcache_v1beta2 API Reference +------------------------------ .. toctree:: :maxdepth: 2 - memcache_v1beta2/services - memcache_v1beta2/types + Client (v1beta2) + Types (v1beta2) Changelog diff --git a/docs/memcache_v1/cloud_memcache.rst b/docs/memcache_v1/cloud_memcache.rst new file mode 100644 index 0000000..0c21866 --- /dev/null +++ b/docs/memcache_v1/cloud_memcache.rst @@ -0,0 +1,11 @@ +CloudMemcache +------------------------------- + +.. automodule:: google.cloud.memcache_v1.services.cloud_memcache + :members: + :inherited-members: + + +.. automodule:: google.cloud.memcache_v1.services.cloud_memcache.pagers + :members: + :inherited-members: diff --git a/docs/memcache_v1/services.rst b/docs/memcache_v1/services.rst new file mode 100644 index 0000000..01fd6b8 --- /dev/null +++ b/docs/memcache_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Memcache v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + cloud_memcache diff --git a/docs/memcache_v1/types.rst b/docs/memcache_v1/types.rst new file mode 100644 index 0000000..06bda21 --- /dev/null +++ b/docs/memcache_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Memcache v1 API +====================================== + +.. automodule:: google.cloud.memcache_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/memcache_v1beta2/cloud_memcache.rst b/docs/memcache_v1beta2/cloud_memcache.rst new file mode 100644 index 0000000..b20fc3a --- /dev/null +++ b/docs/memcache_v1beta2/cloud_memcache.rst @@ -0,0 +1,11 @@ +CloudMemcache +------------------------------- + +.. 
automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache + :members: + :inherited-members: + + +.. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache.pagers + :members: + :inherited-members: diff --git a/docs/memcache_v1beta2/services.rst b/docs/memcache_v1beta2/services.rst index 6b2845e..e5faef5 100644 --- a/docs/memcache_v1beta2/services.rst +++ b/docs/memcache_v1beta2/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Memcache v1beta2 API ============================================== +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.memcache_v1beta2.services.cloud_memcache - :members: - :inherited-members: + cloud_memcache diff --git a/docs/memcache_v1beta2/types.rst b/docs/memcache_v1beta2/types.rst index 1b47aa6..19e52f6 100644 --- a/docs/memcache_v1beta2/types.rst +++ b/docs/memcache_v1beta2/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Memcache v1beta2 API .. automodule:: google.cloud.memcache_v1beta2.types :members: + :undoc-members: :show-inheritance: diff --git a/google/cloud/memcache/__init__.py b/google/cloud/memcache/__init__.py index b66f063..4075bad 100644 --- a/google/cloud/memcache/__init__.py +++ b/google/cloud/memcache/__init__.py @@ -15,26 +15,22 @@ # limitations under the License. # -from google.cloud.memcache_v1beta2.services.cloud_memcache.async_client import ( +from google.cloud.memcache_v1.services.cloud_memcache.async_client import ( CloudMemcacheAsyncClient, ) -from google.cloud.memcache_v1beta2.services.cloud_memcache.client import ( - CloudMemcacheClient, -) -from google.cloud.memcache_v1beta2.types.cloud_memcache import ApplyParametersRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import CreateInstanceRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import DeleteInstanceRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import GetInstanceRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import Instance -from google.cloud.memcache_v1beta2.types.cloud_memcache import ListInstancesRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import ListInstancesResponse -from google.cloud.memcache_v1beta2.types.cloud_memcache import LocationMetadata -from google.cloud.memcache_v1beta2.types.cloud_memcache import MemcacheParameters -from google.cloud.memcache_v1beta2.types.cloud_memcache import MemcacheVersion -from google.cloud.memcache_v1beta2.types.cloud_memcache import OperationMetadata -from google.cloud.memcache_v1beta2.types.cloud_memcache import UpdateInstanceRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import UpdateParametersRequest -from google.cloud.memcache_v1beta2.types.cloud_memcache import ZoneMetadata +from google.cloud.memcache_v1.services.cloud_memcache.client import CloudMemcacheClient +from google.cloud.memcache_v1.types.cloud_memcache import ApplyParametersRequest +from google.cloud.memcache_v1.types.cloud_memcache import CreateInstanceRequest +from google.cloud.memcache_v1.types.cloud_memcache import DeleteInstanceRequest +from google.cloud.memcache_v1.types.cloud_memcache import GetInstanceRequest +from google.cloud.memcache_v1.types.cloud_memcache import Instance +from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesRequest +from google.cloud.memcache_v1.types.cloud_memcache import ListInstancesResponse +from google.cloud.memcache_v1.types.cloud_memcache import MemcacheParameters +from google.cloud.memcache_v1.types.cloud_memcache import MemcacheVersion +from 
google.cloud.memcache_v1.types.cloud_memcache import OperationMetadata +from google.cloud.memcache_v1.types.cloud_memcache import UpdateInstanceRequest +from google.cloud.memcache_v1.types.cloud_memcache import UpdateParametersRequest __all__ = ( "ApplyParametersRequest", @@ -46,11 +42,9 @@ "Instance", "ListInstancesRequest", "ListInstancesResponse", - "LocationMetadata", "MemcacheParameters", "MemcacheVersion", "OperationMetadata", "UpdateInstanceRequest", "UpdateParametersRequest", - "ZoneMetadata", ) diff --git a/google/cloud/memcache_v1/__init__.py b/google/cloud/memcache_v1/__init__.py new file mode 100644 index 0000000..4d28d1b --- /dev/null +++ b/google/cloud/memcache_v1/__init__.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.cloud_memcache import CloudMemcacheClient +from .types.cloud_memcache import ApplyParametersRequest +from .types.cloud_memcache import CreateInstanceRequest +from .types.cloud_memcache import DeleteInstanceRequest +from .types.cloud_memcache import GetInstanceRequest +from .types.cloud_memcache import Instance +from .types.cloud_memcache import ListInstancesRequest +from .types.cloud_memcache import ListInstancesResponse +from .types.cloud_memcache import MemcacheParameters +from .types.cloud_memcache import MemcacheVersion +from .types.cloud_memcache import OperationMetadata +from .types.cloud_memcache import UpdateInstanceRequest +from .types.cloud_memcache import UpdateParametersRequest + + +__all__ = ( + "ApplyParametersRequest", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MemcacheParameters", + "MemcacheVersion", + "OperationMetadata", + "UpdateInstanceRequest", + "UpdateParametersRequest", + "CloudMemcacheClient", +) diff --git a/google/cloud/memcache_v1/py.typed b/google/cloud/memcache_v1/py.typed new file mode 100644 index 0000000..7959cf4 --- /dev/null +++ b/google/cloud/memcache_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-memcache package uses inline types. diff --git a/google/cloud/memcache_v1/services/__init__.py b/google/cloud/memcache_v1/services/__init__.py new file mode 100644 index 0000000..42ffdf2 --- /dev/null +++ b/google/cloud/memcache_v1/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/google/cloud/memcache_v1/services/cloud_memcache/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py new file mode 100644 index 0000000..8524cb4 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import CloudMemcacheClient +from .async_client import CloudMemcacheAsyncClient + +__all__ = ( + "CloudMemcacheClient", + "CloudMemcacheAsyncClient", +) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py new file mode 100644 index 0000000..fd21c36 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/async_client.py @@ -0,0 +1,857 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.memcache_v1.services.cloud_memcache import pagers +from google.cloud.memcache_v1.types import cloud_memcache +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport +from .client import CloudMemcacheClient + + +class CloudMemcacheAsyncClient: + """Configures and manages Cloud Memorystore for Memcached instances. 
+ + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + """ + + _client: CloudMemcacheClient + + DEFAULT_ENDPOINT = CloudMemcacheClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudMemcacheClient.DEFAULT_MTLS_ENDPOINT + + instance_path = staticmethod(CloudMemcacheClient.instance_path) + parse_instance_path = staticmethod(CloudMemcacheClient.parse_instance_path) + + common_billing_account_path = staticmethod( + CloudMemcacheClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudMemcacheClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(CloudMemcacheClient.common_folder_path) + parse_common_folder_path = staticmethod( + CloudMemcacheClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + CloudMemcacheClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + CloudMemcacheClient.parse_common_organization_path + ) + + common_project_path = staticmethod(CloudMemcacheClient.common_project_path) + parse_common_project_path = staticmethod( + CloudMemcacheClient.parse_common_project_path + ) + + common_location_path = staticmethod(CloudMemcacheClient.common_location_path) + parse_common_location_path = staticmethod( + CloudMemcacheClient.parse_common_location_path + ) + + from_service_account_info = CloudMemcacheClient.from_service_account_info + from_service_account_file = CloudMemcacheClient.from_service_account_file + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudMemcacheTransport: + """Return the transport used by the client instance. + + Returns: + CloudMemcacheTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CloudMemcacheClient).get_transport_class, type(CloudMemcacheClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, CloudMemcacheTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the cloud memcache client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudMemcacheTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. 
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = CloudMemcacheClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_instances( + self, + request: cloud_memcache.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists Instances in a given location. + + Args: + request (:class:`google.cloud.memcache_v1.types.ListInstancesRequest`): + The request object. Request for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesAsyncPager: + Response for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance( + self, + request: cloud_memcache.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.Instance: + r"""Gets details of a single Instance. + + Args: + request (:class:`google.cloud.memcache_v1.types.GetInstanceRequest`): + The request object. Request for + [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. + name (:class:`str`): + Required. Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memcache_v1.types.Instance: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_instance( + self, + request: cloud_memcache.CreateInstanceRequest = None, + *, + parent: str = None, + instance: cloud_memcache.Instance = None, + instance_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Instance in a given location. + + Args: + request (:class:`google.cloud.memcache_v1.types.CreateInstanceRequest`): + The request object. Request for + [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. + parent (:class:`str`): + Required. 
The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.memcache_v1.types.Instance`): + Required. A Memcached Instance + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. The logical name of the Memcached instance in + the user project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the user project / location. + + If any of the above are not met, will raise an invalid + argument error. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_instance( + self, + request: cloud_memcache.UpdateInstanceRequest = None, + *, + instance: cloud_memcache.Instance = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing Instance in a given project and + location. + + Args: + request (:class:`google.cloud.memcache_v1.types.UpdateInstanceRequest`): + The request object. Request for + [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. + instance (:class:`google.cloud.memcache_v1.types.Instance`): + Required. A Memcached Instance. Only fields specified in + update_mask are updated. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + + - ``displayName`` + + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_parameters( + self, + request: cloud_memcache.UpdateParametersRequest = None, + *, + name: str = None, + update_mask: field_mask.FieldMask = None, + parameters: cloud_memcache.MemcacheParameters = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Args: + request (:class:`google.cloud.memcache_v1.types.UpdateParametersRequest`): + The request object. Request for + [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. + name (:class:`str`): + Required. Resource name of the + Memcached instance for which the + parameters should be updated. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parameters (:class:`google.cloud.memcache_v1.types.MemcacheParameters`): + The parameters to apply to the + instance. + + This corresponds to the ``parameters`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, update_mask, parameters]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.UpdateParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if update_mask is not None: + request.update_mask = update_mask + if parameters is not None: + request.parameters = parameters + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_parameters, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance( + self, + request: cloud_memcache.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Instance. + + Args: + request (:class:`google.cloud.memcache_v1.types.DeleteInstanceRequest`): + The request object. Request for + [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. + name (:class:`str`): + Required. Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instance, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty.Empty, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def apply_parameters( + self, + request: cloud_memcache.ApplyParametersRequest = None, + *, + name: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""ApplyParameters will restart the set of specified + nodes in order to update them to the current set of + parameters for the Memcached Instance. + + Args: + request (:class:`google.cloud.memcache_v1.types.ApplyParametersRequest`): + The request object. Request for + [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. + name (:class:`str`): + Required. Resource name of the + Memcached instance for which parameter + group updates should be applied. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_ids (:class:`Sequence[str]`): + Nodes to which we should apply the + instance-level parameter group. + + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (:class:`bool`): + Whether to apply instance-level + parameter group to all nodes. If set to + true, will explicitly restrict users + from specifying any nodes, and apply + parameter group updates to all nodes + within the instance. + + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.ApplyParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if apply_all is not None: + request.apply_all = apply_all + + if node_ids: + request.node_ids.extend(node_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.apply_parameters, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudMemcacheAsyncClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/client.py b/google/cloud/memcache_v1/services/cloud_memcache/client.py new file mode 100644 index 0000000..0ae3f3e --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -0,0 +1,1063 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.memcache_v1.services.cloud_memcache import pagers +from google.cloud.memcache_v1.types import cloud_memcache +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudMemcacheGrpcTransport +from .transports.grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport + + +class CloudMemcacheClientMeta(type): + """Metaclass for the CloudMemcache client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] + _transport_registry["grpc"] = CloudMemcacheGrpcTransport + _transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[CloudMemcacheTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+ + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): + """Configures and manages Cloud Memorystore for Memcached instances. + + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "memcache.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudMemcacheTransport: + """Return the transport used by the client instance. 
+ + Returns: + CloudMemcacheTransport: The transport used by the client instance. + """ + return self._transport + + @staticmethod + def instance_path(project: str, location: str, instance: str,) -> str: + """Return a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, location=location, instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parse a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, CloudMemcacheTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the cloud memcache client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, CloudMemcacheTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudMemcacheTransport): + # transport is a CloudMemcacheTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_instances( + self, + request: cloud_memcache.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists Instances in a given location. + + Args: + request (google.cloud.memcache_v1.types.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + parent (str): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memcache_v1.services.cloud_memcache.pagers.ListInstancesPager: + Response for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.ListInstancesRequest): + request = cloud_memcache.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_instance( + self, + request: cloud_memcache.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_memcache.Instance: + r"""Gets details of a single Instance. + + Args: + request (google.cloud.memcache_v1.types.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. + name (str): + Required. Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memcache_v1.types.Instance: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.GetInstanceRequest): + request = cloud_memcache.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_instance( + self, + request: cloud_memcache.CreateInstanceRequest = None, + *, + parent: str = None, + instance: cloud_memcache.Instance = None, + instance_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Instance in a given location. + + Args: + request (google.cloud.memcache_v1.types.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. + parent (str): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.memcache_v1.types.Instance): + Required. 
A Memcached Instance + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. The logical name of the Memcached instance in + the user project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the user project / location. + + If any of the above are not met, will raise an invalid + argument error. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.CreateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.CreateInstanceRequest): + request = cloud_memcache.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance( + self, + request: cloud_memcache.UpdateInstanceRequest = None, + *, + instance: cloud_memcache.Instance = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an existing Instance in a given project and + location. + + Args: + request (google.cloud.memcache_v1.types.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. 
+ instance (google.cloud.memcache_v1.types.Instance): + Required. A Memcached Instance. Only fields specified in + update_mask are updated. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + + - ``displayName`` + + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.UpdateInstanceRequest): + request = cloud_memcache.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_parameters( + self, + request: cloud_memcache.UpdateParametersRequest = None, + *, + name: str = None, + update_mask: field_mask.FieldMask = None, + parameters: cloud_memcache.MemcacheParameters = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Args: + request (google.cloud.memcache_v1.types.UpdateParametersRequest): + The request object. Request for + [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. + name (str): + Required. 
Resource name of the + Memcached instance for which the + parameters should be updated. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parameters (google.cloud.memcache_v1.types.MemcacheParameters): + The parameters to apply to the + instance. + + This corresponds to the ``parameters`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, update_mask, parameters]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.UpdateParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.UpdateParametersRequest): + request = cloud_memcache.UpdateParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if update_mask is not None: + request.update_mask = update_mask + if parameters is not None: + request.parameters = parameters + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_parameters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance( + self, + request: cloud_memcache.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Instance. + + Args: + request (google.cloud.memcache_v1.types.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. + name (str): + Required. 
Memcached instance resource name in the + format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.DeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.DeleteInstanceRequest): + request = cloud_memcache.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty.Empty, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + + def apply_parameters( + self, + request: cloud_memcache.ApplyParametersRequest = None, + *, + name: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""ApplyParameters will restart the set of specified + nodes in order to update them to the current set of + parameters for the Memcached Instance. + + Args: + request (google.cloud.memcache_v1.types.ApplyParametersRequest): + The request object. Request for + [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. + name (str): + Required. Resource name of the + Memcached instance for which parameter + group updates should be applied. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_ids (Sequence[str]): + Nodes to which we should apply the + instance-level parameter group. + + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (bool): + Whether to apply instance-level + parameter group to all nodes. If set to + true, will explicitly restrict users + from specifying any nodes, and apply + parameter group updates to all nodes + within the instance. + + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1.types.Instance` + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.ApplyParametersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.ApplyParametersRequest): + request = cloud_memcache.ApplyParametersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if node_ids is not None: + request.node_ids = node_ids + if apply_all is not None: + request.apply_all = apply_all + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.apply_parameters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. 
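+        # Illustrative usage sketch (not generated code; the instance name is
+        # hypothetical): apply_parameters returns a long-running operation, so
+        # callers usually block on its result to obtain the updated Instance:
+        #
+        #     operation = client.apply_parameters(
+        #         name="projects/my-project/locations/us-central1/instances/my-memcached",
+        #         apply_all=True,
+        #     )
+        #     instance = operation.result()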
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("CloudMemcacheClient",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py new file mode 100644 index 0000000..7a1324e --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/pagers.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) + +from google.cloud.memcache_v1.types import cloud_memcache + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.memcache_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.memcache_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloud_memcache.ListInstancesResponse], + request: cloud_memcache.ListInstancesRequest, + response: cloud_memcache.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.memcache_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.memcache_v1.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloud_memcache.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[cloud_memcache.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[cloud_memcache.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.memcache_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.memcache_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloud_memcache.ListInstancesResponse]], + request: cloud_memcache.ListInstancesRequest, + response: cloud_memcache.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.memcache_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.memcache_v1.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloud_memcache.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[cloud_memcache.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[cloud_memcache.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py new file mode 100644 index 0000000..38122c6 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudMemcacheTransport +from .grpc import CloudMemcacheGrpcTransport +from .grpc_asyncio import CloudMemcacheGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudMemcacheTransport]] +_transport_registry["grpc"] = CloudMemcacheGrpcTransport +_transport_registry["grpc_asyncio"] = CloudMemcacheGrpcAsyncIOTransport + +__all__ = ( + "CloudMemcacheTransport", + "CloudMemcacheGrpcTransport", + "CloudMemcacheGrpcAsyncIOTransport", +) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py new file mode 100644 index 0000000..a6a2b3c --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/base.py @@ -0,0 +1,206 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.memcache_v1.types import cloud_memcache +from google.longrunning import operations_pb2 as operations # type: ignore + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-memcache",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class CloudMemcacheTransport(abc.ABC): + """Abstract transport class for CloudMemcache.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, + *, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, default_timeout=1200.0, client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, default_timeout=1200.0, client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, default_timeout=1200.0, client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, default_timeout=1200.0, client_info=client_info, + ), + self.update_parameters: gapic_v1.method.wrap_method( + self.update_parameters, default_timeout=1200.0, client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, default_timeout=1200.0, client_info=client_info, + ), + self.apply_parameters: gapic_v1.method.wrap_method( + self.apply_parameters, default_timeout=1200.0, client_info=client_info, + ), + } + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instances( + self, + ) -> typing.Callable[ + [cloud_memcache.ListInstancesRequest], + typing.Union[ + cloud_memcache.ListInstancesResponse, + typing.Awaitable[cloud_memcache.ListInstancesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_instance( + self, + ) -> typing.Callable[ + [cloud_memcache.GetInstanceRequest], + typing.Union[ + cloud_memcache.Instance, typing.Awaitable[cloud_memcache.Instance] + ], + ]: + raise NotImplementedError() + + @property + def create_instance( + self, + ) -> typing.Callable[ + [cloud_memcache.CreateInstanceRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def 
update_instance( + self, + ) -> typing.Callable[ + [cloud_memcache.UpdateInstanceRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def update_parameters( + self, + ) -> typing.Callable[ + [cloud_memcache.UpdateParametersRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_instance( + self, + ) -> typing.Callable[ + [cloud_memcache.DeleteInstanceRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def apply_parameters( + self, + ) -> typing.Callable[ + [cloud_memcache.ApplyParametersRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + +__all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py new file mode 100644 index 0000000..bc03a88 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc.py @@ -0,0 +1,477 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.memcache_v1.types import cloud_memcache +from google.longrunning import operations_pb2 as operations # type: ignore + +from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO + + +class CloudMemcacheGrpcTransport(CloudMemcacheTransport): + """gRPC backend transport for CloudMemcache. + + Configures and manages Cloud Memorystore for Memcached instances. 
+ + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._ssl_channel_credentials = ssl_channel_credentials + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=self._ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._stubs = {} # type: Dict[str, Callable] + self._operations_client = None + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + @classmethod + def create_channel( + cls, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [cloud_memcache.ListInstancesRequest], cloud_memcache.ListInstancesResponse + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given location. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/ListInstances", + request_serializer=cloud_memcache.ListInstancesRequest.serialize, + response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[[cloud_memcache.GetInstanceRequest], cloud_memcache.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
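+        # Illustrative sketch (hypothetical resource name, not generated code):
+        # the cached callable returned here can also be invoked directly with a
+        # request message, bypassing the retry/timeout wrappers the client adds:
+        #
+        #     request = cloud_memcache.GetInstanceRequest(
+        #         name="projects/my-project/locations/us-central1/instances/my-memcached"
+        #     )
+        #     instance = transport.get_instance(request)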
+ if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/GetInstance", + request_serializer=cloud_memcache.GetInstanceRequest.serialize, + response_deserializer=cloud_memcache.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given location. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", + request_serializer=cloud_memcache.CreateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[[cloud_memcache.UpdateInstanceRequest], operations.Operation]: + r"""Return a callable for the update instance method over gRPC. + + Updates an existing Instance in a given project and + location. + + Returns: + Callable[[~.UpdateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", + request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def update_parameters( + self, + ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: + r"""Return a callable for the update parameters method over gRPC. + + Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Returns: + Callable[[~.UpdateParametersRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_parameters" not in self._stubs: + self._stubs["update_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", + request_serializer=cloud_memcache.UpdateParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_parameters"] + + @property + def delete_instance( + self, + ) -> Callable[[cloud_memcache.DeleteInstanceRequest], operations.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. 
+ + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", + request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def apply_parameters( + self, + ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: + r"""Return a callable for the apply parameters method over gRPC. + + ApplyParameters will restart the set of specified + nodes in order to update them to the current set of + parameters for the Memcached Instance. + + Returns: + Callable[[~.ApplyParametersRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "apply_parameters" not in self._stubs: + self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", + request_serializer=cloud_memcache.ApplyParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_parameters"] + + +__all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py new file mode 100644 index 0000000..c11f7c5 --- /dev/null +++ b/google/cloud/memcache_v1/services/cloud_memcache/transports/grpc_asyncio.py @@ -0,0 +1,496 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.memcache_v1.types import cloud_memcache +from google.longrunning import operations_pb2 as operations # type: ignore + +from .base import CloudMemcacheTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudMemcacheGrpcTransport + + +class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): + """gRPC AsyncIO backend transport for CloudMemcache. + + Configures and manages Cloud Memorystore for Memcached instances. + + The ``memcache.googleapis.com`` service implements the Google Cloud + Memorystore for Memcached API and defines the following resource + model for managing Memorystore Memcached (also called Memcached + below) instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Memcached instances, named: + ``/instances/*`` + - As such, Memcached instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be a GCP ``region``; for example: + + - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "memcache.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
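A short sketch of the mutual TLS path described above, using the ``client_cert_source_for_mtls`` callback; the certificate and key file names are placeholders, and Application Default Credentials are assumed for the channel itself.

from google.cloud.memcache_v1.services.cloud_memcache.transports.grpc_asyncio import (
    CloudMemcacheGrpcAsyncIOTransport,
)


def client_cert_source():
    # Placeholder files; a real callback returns (cert_bytes, key_bytes) in PEM format.
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()


transport = CloudMemcacheGrpcAsyncIOTransport(
    host="memcache.mtls.googleapis.com",
    client_cert_source_for_mtls=client_cert_source,
)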
+ """ + self._ssl_channel_credentials = ssl_channel_credentials + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=self._ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + self._operations_client = None + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. 
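The channel-precedence branch above can be exercised without any credentials: an explicitly supplied channel bypasses credential handling entirely and is exactly what ``grpc_channel`` returns. A minimal sketch:

from grpc.experimental import aio

from google.cloud.memcache_v1.services.cloud_memcache.transports.grpc_asyncio import (
    CloudMemcacheGrpcAsyncIOTransport,
)

# An explicit channel (here a local insecure one) is used as-is; credentials,
# credentials_file, and the mTLS arguments are ignored in this branch.
channel = aio.insecure_channel("localhost:8080")
transport = CloudMemcacheGrpcAsyncIOTransport(channel=channel)
assert transport.grpc_channel is channel  # the property returns the cached channel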
+ return self._operations_client + + @property + def list_instances( + self, + ) -> Callable[ + [cloud_memcache.ListInstancesRequest], + Awaitable[cloud_memcache.ListInstancesResponse], + ]: + r"""Return a callable for the list instances method over gRPC. + + Lists Instances in a given location. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/ListInstances", + request_serializer=cloud_memcache.ListInstancesRequest.serialize, + response_deserializer=cloud_memcache.ListInstancesResponse.deserialize, + ) + return self._stubs["list_instances"] + + @property + def get_instance( + self, + ) -> Callable[ + [cloud_memcache.GetInstanceRequest], Awaitable[cloud_memcache.Instance] + ]: + r"""Return a callable for the get instance method over gRPC. + + Gets details of a single Instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/GetInstance", + request_serializer=cloud_memcache.GetInstanceRequest.serialize, + response_deserializer=cloud_memcache.Instance.deserialize, + ) + return self._stubs["get_instance"] + + @property + def create_instance( + self, + ) -> Callable[ + [cloud_memcache.CreateInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the create instance method over gRPC. + + Creates a new Instance in a given location. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/CreateInstance", + request_serializer=cloud_memcache.CreateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["create_instance"] + + @property + def update_instance( + self, + ) -> Callable[ + [cloud_memcache.UpdateInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the update instance method over gRPC. + + Updates an existing Instance in a given project and + location. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/UpdateInstance", + request_serializer=cloud_memcache.UpdateInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_instance"] + + @property + def update_parameters( + self, + ) -> Callable[ + [cloud_memcache.UpdateParametersRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the update parameters method over gRPC. + + Updates the defined Memcached Parameters for an + existing Instance. This method only stages the + parameters, it must be followed by ApplyParameters to + apply the parameters to nodes of the Memcached Instance. + + Returns: + Callable[[~.UpdateParametersRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_parameters" not in self._stubs: + self._stubs["update_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/UpdateParameters", + request_serializer=cloud_memcache.UpdateParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["update_parameters"] + + @property + def delete_instance( + self, + ) -> Callable[ + [cloud_memcache.DeleteInstanceRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a single Instance. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/DeleteInstance", + request_serializer=cloud_memcache.DeleteInstanceRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["delete_instance"] + + @property + def apply_parameters( + self, + ) -> Callable[ + [cloud_memcache.ApplyParametersRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the apply parameters method over gRPC. + + ApplyParameters will restart the set of specified + nodes in order to update them to the current set of + parameters for the Memcached Instance. + + Returns: + Callable[[~.ApplyParametersRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "apply_parameters" not in self._stubs: + self._stubs["apply_parameters"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1.CloudMemcache/ApplyParameters", + request_serializer=cloud_memcache.ApplyParametersRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_parameters"] + + +__all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1/types/__init__.py b/google/cloud/memcache_v1/types/__init__.py new file mode 100644 index 0000000..5fe285a --- /dev/null +++ b/google/cloud/memcache_v1/types/__init__.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .cloud_memcache import ( + Instance, + ListInstancesRequest, + ListInstancesResponse, + GetInstanceRequest, + CreateInstanceRequest, + UpdateInstanceRequest, + DeleteInstanceRequest, + ApplyParametersRequest, + UpdateParametersRequest, + MemcacheParameters, + OperationMetadata, + MemcacheVersion, +) + +__all__ = ( + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "ApplyParametersRequest", + "UpdateParametersRequest", + "MemcacheParameters", + "OperationMetadata", + "MemcacheVersion", +) diff --git a/google/cloud/memcache_v1/types/cloud_memcache.py b/google/cloud/memcache_v1/types/cloud_memcache.py new file mode 100644 index 0000000..066bc9d --- /dev/null +++ b/google/cloud/memcache_v1/types/cloud_memcache.py @@ -0,0 +1,518 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.memcache.v1", + manifest={ + "MemcacheVersion", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "ApplyParametersRequest", + "UpdateParametersRequest", + "MemcacheParameters", + "OperationMetadata", + }, +) + + +class MemcacheVersion(proto.Enum): + r"""Memcached versions supported by our service.""" + MEMCACHE_VERSION_UNSPECIFIED = 0 + MEMCACHE_1_5 = 1 + + +class Instance(proto.Message): + r""" + + Attributes: + name (str): + Required. 
Unique name of the resource in this scope + including project and location using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note: Memcached instances are managed and addressed at + regional level so location_id here refers to a GCP region; + however, users may choose which zones Memcached nodes within + an instances should be provisioned in. Refer to [zones] + field for more details. + display_name (str): + User provided name for the instance only used + for display purposes. Cannot be more than 80 + characters. + labels (Sequence[google.cloud.memcache_v1.types.Instance.LabelsEntry]): + Resource labels to represent user-provided + metadata. Refer to cloud documentation on labels + for more details. + https://cloud.google.com/compute/docs/labeling- + resources + authorized_network (str): + The full name of the Google Compute Engine + `network `__ + to which the instance is connected. If left unspecified, the + ``default`` network will be used. + zones (Sequence[str]): + Zones where Memcached nodes should be + provisioned in. Memcached nodes will be equally + distributed across these zones. If not provided, + the service will by default create nodes in all + zones in the region for the instance. + node_count (int): + Required. Number of nodes in the Memcached + instance. + node_config (google.cloud.memcache_v1.types.Instance.NodeConfig): + Required. Configuration for Memcached nodes. + memcache_version (google.cloud.memcache_v1.types.MemcacheVersion): + The major version of Memcached software. If not provided, + latest supported version will be used. Currently the latest + supported major version is MEMCACHE_1_5. The minor version + will be automatically determined by our system based on the + latest supported minor version. + parameters (google.cloud.memcache_v1.types.MemcacheParameters): + Optional: User defined parameters to apply to + the memcached process on each node. + memcache_nodes (Sequence[google.cloud.memcache_v1.types.Instance.Node]): + Output only. List of Memcached nodes. Refer to [Node] + message for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the instance was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the instance was + updated. + state (google.cloud.memcache_v1.types.Instance.State): + Output only. The state of this Memcached + instance. + memcache_full_version (str): + Output only. The full version of memcached + server running on this instance. System + automatically determines the full memcached + version for an instance based on the input + MemcacheVersion. + The full version format will be + "memcached-1.5.16". + instance_messages (Sequence[google.cloud.memcache_v1.types.Instance.InstanceMessage]): + List of messages that describe current + statuses of memcached instance. + discovery_endpoint (str): + Output only. Endpoint for Discovery API + """ + + class State(proto.Enum): + r"""Different states of a Memcached instance.""" + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 4 + PERFORMING_MAINTENANCE = 5 + + class NodeConfig(proto.Message): + r"""Configuration for a Memcached Node. + + Attributes: + cpu_count (int): + Required. Number of cpus per Memcached node. + memory_size_mb (int): + Required. Memory size in MiB for each + Memcached node. 
+ """ + + cpu_count = proto.Field(proto.INT32, number=1) + + memory_size_mb = proto.Field(proto.INT32, number=2) + + class Node(proto.Message): + r""" + + Attributes: + node_id (str): + Output only. Identifier of the Memcached + node. The node id does not include project or + location like the Memcached instance name. + zone (str): + Output only. Location (GCP Zone) for the + Memcached node. + state (google.cloud.memcache_v1.types.Instance.Node.State): + Output only. Current state of the Memcached + node. + host (str): + Output only. Hostname or IP address of the + Memcached node used by the clients to connect to + the Memcached server on this node. + port (int): + Output only. The port number of the Memcached + server on this node. + parameters (google.cloud.memcache_v1.types.MemcacheParameters): + User defined parameters currently applied to + the node. + """ + + class State(proto.Enum): + r"""Different states of a Memcached node.""" + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + DELETING = 3 + UPDATING = 4 + + node_id = proto.Field(proto.STRING, number=1) + + zone = proto.Field(proto.STRING, number=2) + + state = proto.Field(proto.ENUM, number=3, enum="Instance.Node.State",) + + host = proto.Field(proto.STRING, number=4) + + port = proto.Field(proto.INT32, number=5) + + parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) + + class InstanceMessage(proto.Message): + r""" + + Attributes: + code (google.cloud.memcache_v1.types.Instance.InstanceMessage.Code): + A code that correspond to one type of user- + acing message. + message (str): + Message on memcached instance which will be + exposed to users. + """ + + class Code(proto.Enum): + r"""""" + CODE_UNSPECIFIED = 0 + ZONE_DISTRIBUTION_UNBALANCED = 1 + + code = proto.Field(proto.ENUM, number=1, enum="Instance.InstanceMessage.Code",) + + message = proto.Field(proto.STRING, number=2) + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + labels = proto.MapField(proto.STRING, proto.STRING, number=3) + + authorized_network = proto.Field(proto.STRING, number=4) + + zones = proto.RepeatedField(proto.STRING, number=5) + + node_count = proto.Field(proto.INT32, number=6) + + node_config = proto.Field(proto.MESSAGE, number=7, message=NodeConfig,) + + memcache_version = proto.Field(proto.ENUM, number=9, enum="MemcacheVersion",) + + parameters = proto.Field(proto.MESSAGE, number=11, message="MemcacheParameters",) + + memcache_nodes = proto.RepeatedField(proto.MESSAGE, number=12, message=Node,) + + create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) + + state = proto.Field(proto.ENUM, number=15, enum=State,) + + memcache_full_version = proto.Field(proto.STRING, number=18) + + instance_messages = proto.RepeatedField( + proto.MESSAGE, number=19, message=InstanceMessage, + ) + + discovery_endpoint = proto.Field(proto.STRING, number=20) + + +class ListInstancesRequest(proto.Message): + r"""Request for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + + Attributes: + parent (str): + Required. The resource name of the instance location using + the form: ``projects/{project_id}/locations/{location_id}`` + where ``location_id`` refers to a GCP region + page_size (int): + The maximum number of items to return. + + If not specified, a default value of 1000 will be used by + the service. 
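A sketch of constructing the ``Instance`` message defined above with proto-plus keyword arguments; every value here is hypothetical.

from google.cloud.memcache_v1 import types

instance = types.Instance(
    name="projects/my-project/locations/us-central1/instances/my-memcached",
    display_name="My Memcached",
    node_count=3,
    node_config=types.Instance.NodeConfig(cpu_count=1, memory_size_mb=1024),
    memcache_version=types.MemcacheVersion.MEMCACHE_1_5,
    zones=["us-central1-a", "us-central1-b"],
    labels={"env": "dev"},
)
print(instance)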
Regardless of the page_size value, the response + may include a partial list and a caller should only rely on + response's + [next_page_token][CloudMemcache.ListInstancesResponse.next_page_token] + to determine if there are more instances left to be queried. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + filter (str): + List filter. For example, exclude all + Memcached instances with name as my-instance by + specifying "name != my-instance". + order_by (str): + Sort results. Supported values are "name", + "name desc" or "" (unsorted). + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + filter = proto.Field(proto.STRING, number=4) + + order_by = proto.Field(proto.STRING, number=5) + + +class ListInstancesResponse(proto.Message): + r"""Response for + [ListInstances][google.cloud.memcache.v1.CloudMemcache.ListInstances]. + + Attributes: + instances (Sequence[google.cloud.memcache_v1.types.Instance]): + A list of Memcached instances in the project in the + specified location, or across all locations. + + If the ``location_id`` in the parent field of the request is + "-", all regions available to the project are queried, and + the results aggregated. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (Sequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) + + next_page_token = proto.Field(proto.STRING, number=2) + + unreachable = proto.RepeatedField(proto.STRING, number=3) + + +class GetInstanceRequest(proto.Message): + r"""Request for + [GetInstance][google.cloud.memcache.v1.CloudMemcache.GetInstance]. + + Attributes: + name (str): + Required. Memcached instance resource name in the format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateInstanceRequest(proto.Message): + r"""Request for + [CreateInstance][google.cloud.memcache.v1.CloudMemcache.CreateInstance]. + + Attributes: + parent (str): + Required. The resource name of the instance location using + the form: ``projects/{project_id}/locations/{location_id}`` + where ``location_id`` refers to a GCP region + instance_id (str): + Required. The logical name of the Memcached instance in the + user project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the user project / location. + + If any of the above are not met, will raise an invalid + argument error. + instance (google.cloud.memcache_v1.types.Instance): + Required. A Memcached Instance + """ + + parent = proto.Field(proto.STRING, number=1) + + instance_id = proto.Field(proto.STRING, number=2) + + instance = proto.Field(proto.MESSAGE, number=3, message="Instance",) + + +class UpdateInstanceRequest(proto.Message): + r"""Request for + [UpdateInstance][google.cloud.memcache.v1.CloudMemcache.UpdateInstance]. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. 
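A sketch of the corresponding list call: the returned pager follows ``next_page_token`` automatically, the ``-`` location queries all regions, and the filter/order_by values mirror the field descriptions above. Application Default Credentials and a placeholder project are assumed.

from google.cloud.memcache_v1 import CloudMemcacheClient

client = CloudMemcacheClient()

pager = client.list_instances(
    request={
        "parent": "projects/my-project/locations/-",
        "filter": "name != my-instance",
        "order_by": "name",
        "page_size": 100,
    }
)
for instance in pager:
    print(instance.name, instance.state)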
+ + - ``displayName`` + instance (google.cloud.memcache_v1.types.Instance): + Required. A Memcached Instance. Only fields specified in + update_mask are updated. + """ + + update_mask = proto.Field(proto.MESSAGE, number=1, message=field_mask.FieldMask,) + + instance = proto.Field(proto.MESSAGE, number=2, message="Instance",) + + +class DeleteInstanceRequest(proto.Message): + r"""Request for + [DeleteInstance][google.cloud.memcache.v1.CloudMemcache.DeleteInstance]. + + Attributes: + name (str): + Required. Memcached instance resource name in the format: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region + """ + + name = proto.Field(proto.STRING, number=1) + + +class ApplyParametersRequest(proto.Message): + r"""Request for + [ApplyParameters][google.cloud.memcache.v1.CloudMemcache.ApplyParameters]. + + Attributes: + name (str): + Required. Resource name of the Memcached + instance for which parameter group updates + should be applied. + node_ids (Sequence[str]): + Nodes to which we should apply the instance- + evel parameter group. + apply_all (bool): + Whether to apply instance-level parameter + group to all nodes. If set to true, will + explicitly restrict users from specifying any + nodes, and apply parameter group updates to all + nodes within the instance. + """ + + name = proto.Field(proto.STRING, number=1) + + node_ids = proto.RepeatedField(proto.STRING, number=2) + + apply_all = proto.Field(proto.BOOL, number=3) + + +class UpdateParametersRequest(proto.Message): + r"""Request for + [UpdateParameters][google.cloud.memcache.v1.CloudMemcache.UpdateParameters]. + + Attributes: + name (str): + Required. Resource name of the Memcached + instance for which the parameters should be + updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + parameters (google.cloud.memcache_v1.types.MemcacheParameters): + The parameters to apply to the instance. + """ + + name = proto.Field(proto.STRING, number=1) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) + + +class MemcacheParameters(proto.Message): + r""" + + Attributes: + id (str): + Output only. The unique ID associated with + this set of parameters. Users can use this id to + determine if the parameters associated with the + instance differ from the parameters associated + with the nodes and any action needs to be taken + to apply parameters on nodes. + params (Sequence[google.cloud.memcache_v1.types.MemcacheParameters.ParamsEntry]): + User defined set of parameters to use in the + memcached process. + """ + + id = proto.Field(proto.STRING, number=1) + + params = proto.MapField(proto.STRING, proto.STRING, number=3) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_detail (str): + Output only. Human-readable status of the + operation, if any. + cancel_requested (bool): + Output only. 
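The stage-then-apply flow these two requests describe, as a sketch against the v1 client; the parameter name and mask path are illustrative only, and default credentials plus placeholder resource names are assumed.

from google.cloud.memcache_v1 import CloudMemcacheClient, types
from google.protobuf import field_mask_pb2

client = CloudMemcacheClient()
name = "projects/my-project/locations/us-central1/instances/my-memcached"

# UpdateParameters only stages the new values...
stage_op = client.update_parameters(
    name=name,
    update_mask=field_mask_pb2.FieldMask(paths=["params"]),  # illustrative mask path
    parameters=types.MemcacheParameters(params={"max-item-size": "2097152"}),
)
stage_op.result()

# ...ApplyParameters restarts nodes so the staged values take effect.
apply_op = client.apply_parameters(name=name, apply_all=True)
instance = apply_op.result()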
Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + target = proto.Field(proto.STRING, number=3) + + verb = proto.Field(proto.STRING, number=4) + + status_detail = proto.Field(proto.STRING, number=5) + + cancel_requested = proto.Field(proto.BOOL, number=6) + + api_version = proto.Field(proto.STRING, number=7) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/memcache_v1beta2/__init__.py b/google/cloud/memcache_v1beta2/__init__.py index 13cd09f..bac2393 100644 --- a/google/cloud/memcache_v1beta2/__init__.py +++ b/google/cloud/memcache_v1beta2/__init__.py @@ -17,6 +17,7 @@ from .services.cloud_memcache import CloudMemcacheClient from .types.cloud_memcache import ApplyParametersRequest +from .types.cloud_memcache import ApplySoftwareUpdateRequest from .types.cloud_memcache import CreateInstanceRequest from .types.cloud_memcache import DeleteInstanceRequest from .types.cloud_memcache import GetInstanceRequest @@ -34,6 +35,7 @@ __all__ = ( "ApplyParametersRequest", + "ApplySoftwareUpdateRequest", "CreateInstanceRequest", "DeleteInstanceRequest", "GetInstanceRequest", diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py index 9373a06..d0d7b85 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/async_client.py @@ -58,8 +58,7 @@ class CloudMemcacheAsyncClient: - As such, Memcached instances are resources of the form: ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note that location_id must be refering to a GCP ``region``; for - example: + Note that location_id must be a GCP ``region``; for example: - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` """ @@ -101,6 +100,7 @@ class CloudMemcacheAsyncClient: CloudMemcacheClient.parse_common_location_path ) + from_service_account_info = CloudMemcacheClient.from_service_account_info from_service_account_file = CloudMemcacheClient.from_service_account_file from_service_account_json = from_service_account_file @@ -174,10 +174,10 @@ async def list_instances( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: - r"""Lists Instances in a given project and location. + r"""Lists Instances in a given location. Args: - request (:class:`~.cloud_memcache.ListInstancesRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.ListInstancesRequest`): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. parent (:class:`str`): @@ -185,6 +185,7 @@ async def list_instances( using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -196,7 +197,7 @@ async def list_instances( sent along with the request as metadata. 
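Because these long-running operations register ``OperationMetadata`` as their metadata type, in-flight status can be inspected while polling. A sketch, assuming default credentials and placeholder names; the metadata may be ``None`` until the server populates it.

from google.cloud.memcache_v1 import CloudMemcacheClient

client = CloudMemcacheClient()
op = client.apply_parameters(
    name="projects/my-project/locations/us-central1/instances/my-memcached",
    apply_all=True,
)

md = op.metadata  # an OperationMetadata message, or None if not yet reported
if md is not None:
    print(md.verb, md.target, md.status_detail, md.cancel_requested)

op.result()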
Returns: - ~.pagers.ListInstancesAsyncPager: + google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesAsyncPager: Response for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. @@ -260,7 +261,7 @@ async def get_instance( r"""Gets details of a single Instance. Args: - request (:class:`~.cloud_memcache.GetInstanceRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.GetInstanceRequest`): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. name (:class:`str`): @@ -268,6 +269,7 @@ async def get_instance( format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -279,8 +281,8 @@ async def get_instance( sent along with the request as metadata. Returns: - ~.cloud_memcache.Instance: - + google.cloud.memcache_v1beta2.types.Instance: + A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -331,11 +333,10 @@ async def create_instance( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Creates a new Instance in a given project and - location. + r"""Creates a new Instance in a given location. Args: - request (:class:`~.cloud_memcache.CreateInstanceRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.CreateInstanceRequest`): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. parent (:class:`str`): @@ -343,6 +344,7 @@ async def create_instance( using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -355,12 +357,15 @@ async def create_instance( - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. + + If any of the above are not met, the API raises an + invalid argument error. This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.cloud_memcache.Instance`): + resource (:class:`google.cloud.memcache_v1beta2.types.Instance`): Required. A Memcached [Instance] resource This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this @@ -373,11 +378,12 @@ async def create_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -444,20 +450,22 @@ async def update_instance( location. Args: - request (:class:`~.cloud_memcache.UpdateInstanceRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.UpdateInstanceRequest`): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. 
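A sketch of the flattened v1beta2 async ``create_instance`` call documented above, assuming default credentials; all names and sizes are placeholders.

import asyncio

from google.cloud.memcache_v1beta2 import types
from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheAsyncClient


async def main():
    client = CloudMemcacheAsyncClient()
    operation = await client.create_instance(
        parent="projects/my-project/locations/us-central1",
        instance_id="my-memcached",
        resource=types.Instance(
            name="projects/my-project/locations/us-central1/instances/my-memcached",
            node_count=1,
            node_config=types.Instance.NodeConfig(cpu_count=1, memory_size_mb=1024),
        ),
    )
    instance = await operation.result()
    print(instance.name, instance.discovery_endpoint)


asyncio.run(main())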
- update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Mask of fields to update. - ``displayName`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.cloud_memcache.Instance`): + resource (:class:`google.cloud.memcache_v1beta2.types.Instance`): Required. A Memcached [Instance] resource. Only fields specified in update_mask are updated. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -469,11 +477,12 @@ async def update_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -537,30 +546,32 @@ async def update_parameters( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + r"""Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Args: - request (:class:`~.cloud_memcache.UpdateParametersRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.UpdateParametersRequest`): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. name (:class:`str`): Required. Resource name of the Memcached instance for which the parameters should be updated. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Mask of fields to update. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (:class:`~.cloud_memcache.MemcacheParameters`): + parameters (:class:`google.cloud.memcache_v1beta2.types.MemcacheParameters`): The parameters to apply to the instance. + This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -572,11 +583,12 @@ async def update_parameters( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -641,13 +653,15 @@ async def delete_instance( r"""Deletes a single Instance. Args: - request (:class:`~.cloud_memcache.DeleteInstanceRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.DeleteInstanceRequest`): The request object. 
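A sketch of a masked update; ``display_name`` is the snake_case field behind the ``displayName`` path listed above. Default credentials and placeholder names are assumed.

from google.cloud.memcache_v1beta2 import CloudMemcacheClient, types
from google.protobuf import field_mask_pb2

client = CloudMemcacheClient()

op = client.update_instance(
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    resource=types.Instance(
        name="projects/my-project/locations/us-central1/instances/my-memcached",
        display_name="Renamed instance",
    ),
)
instance = op.result()  # only the masked fields are written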
Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. name (:class:`str`): - Memcached instance resource name in the format: + Required. Memcached instance resource name in the + format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -659,24 +673,22 @@ async def delete_instance( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -736,33 +748,35 @@ async def apply_parameters( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""ApplyParameters will update current set of Parameters - to the set of specified nodes of the Memcached Instance. + r"""``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Args: - request (:class:`~.cloud_memcache.ApplyParametersRequest`): + request (:class:`google.cloud.memcache_v1beta2.types.ApplyParametersRequest`): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. name (:class:`str`): Required. Resource name of the Memcached instance for which parameter group updates should be applied. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. node_ids (:class:`Sequence[str]`): - Nodes to which we should apply the - instance-level parameter group. + Nodes to which the instance-level + parameter group is applied. + This corresponds to the ``node_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. apply_all (:class:`bool`): - Whether to apply instance-level - parameter group to all nodes. If set to - true, will explicitly restrict users - from specifying any nodes, and apply - parameter group updates to all nodes - within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from + specifying individual nodes, and ``ApplyParameters`` + updates all nodes within the instance. + This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -774,11 +788,12 @@ async def apply_parameters( sent along with the request as metadata. 
Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -832,6 +847,118 @@ async def apply_parameters( # Done; return the response. return response + async def apply_software_update( + self, + request: cloud_memcache.ApplySoftwareUpdateRequest = None, + *, + instance: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates software on the selected nodes of the + Instance. + + Args: + request (:class:`google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest`): + The request object. Request for + [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. + instance (:class:`str`): + Required. Resource name of the + Memcached instance for which software + update should be applied. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_ids (:class:`Sequence[str]`): + Nodes to which we should apply the + update to. Note all the selected nodes + are updated in parallel. + + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (:class:`bool`): + Whether to apply the update to all + nodes. If set to true, will explicitly + restrict users from specifying any + nodes, and apply software update to all + nodes (where applicable) within the + instance. + + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_memcache.ApplySoftwareUpdateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if instance is not None: + request.instance = instance + if apply_all is not None: + request.apply_all = apply_all + + if node_ids: + request.node_ids.extend(node_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
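A sketch of the new ``apply_software_update`` call with its flattened arguments, assuming default credentials; the instance name is a placeholder.

import asyncio

from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheAsyncClient


async def main():
    client = CloudMemcacheAsyncClient()
    operation = await client.apply_software_update(
        instance="projects/my-project/locations/us-central1/instances/my-memcached",
        apply_all=True,  # or node_ids=["node-1"] to update only specific nodes
    )
    instance = await operation.result()
    print(instance.memcache_full_version)


asyncio.run(main())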
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.apply_software_update, + default_timeout=1200.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index 0a1ef7c..0da8b35 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -93,8 +93,7 @@ class CloudMemcacheClient(metaclass=CloudMemcacheClientMeta): - As such, Memcached instances are resources of the form: ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note that location_id must be refering to a GCP ``region``; for - example: + Note that location_id must be a GCP ``region``; for example: - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` """ @@ -133,6 +132,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudMemcacheClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -145,7 +160,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + CloudMemcacheClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -253,10 +268,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.CloudMemcacheTransport]): The + transport (Union[str, CloudMemcacheTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
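The two service-account constructors side by side, as a sketch; the key file path is a placeholder.

import json

from google.cloud.memcache_v1beta2 import CloudMemcacheClient

# From a key file on disk...
client = CloudMemcacheClient.from_service_account_file("service-account.json")

# ...or from an already-parsed dict, which is what from_service_account_info accepts.
with open("service-account.json") as fp:
    info = json.load(fp)
client = CloudMemcacheClient.from_service_account_info(info)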
GOOGLE_API_USE_MTLS_ENDPOINT @@ -292,21 +307,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -349,7 +360,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -363,17 +374,18 @@ def list_instances( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: - r"""Lists Instances in a given project and location. + r"""Lists Instances in a given location. Args: - request (:class:`~.cloud_memcache.ListInstancesRequest`): + request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): The request object. Request for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. - parent (:class:`str`): + parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -385,7 +397,7 @@ def list_instances( sent along with the request as metadata. Returns: - ~.pagers.ListInstancesPager: + google.cloud.memcache_v1beta2.services.cloud_memcache.pagers.ListInstancesPager: Response for [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. @@ -450,14 +462,15 @@ def get_instance( r"""Gets details of a single Instance. Args: - request (:class:`~.cloud_memcache.GetInstanceRequest`): + request (google.cloud.memcache_v1beta2.types.GetInstanceRequest): The request object. Request for [GetInstance][google.cloud.memcache.v1beta2.CloudMemcache.GetInstance]. - name (:class:`str`): + name (str): Required. Memcached instance resource name in the format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -469,8 +482,8 @@ def get_instance( sent along with the request as metadata. Returns: - ~.cloud_memcache.Instance: - + google.cloud.memcache_v1beta2.types.Instance: + A Memorystore for Memcached instance """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -522,22 +535,22 @@ def create_instance( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Creates a new Instance in a given project and - location. + r"""Creates a new Instance in a given location. 
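A sketch of the caller-side setup for the mTLS logic above: opting in through ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` and supplying a certificate callback via ``ClientOptions``. File names are placeholders and default credentials are assumed.

import os

from google.api_core.client_options import ClientOptions
from google.cloud.memcache_v1beta2 import CloudMemcacheClient


def client_cert_source():
    # Placeholder files; return (cert_bytes, key_bytes) in PEM format.
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()


os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"  # client certificates are opt-in

client = CloudMemcacheClient(
    client_options=ClientOptions(client_cert_source=client_cert_source)
)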
Args: - request (:class:`~.cloud_memcache.CreateInstanceRequest`): + request (google.cloud.memcache_v1beta2.types.CreateInstanceRequest): The request object. Request for [CreateInstance][google.cloud.memcache.v1beta2.CloudMemcache.CreateInstance]. - parent (:class:`str`): + parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - instance_id (:class:`str`): + instance_id (str): Required. The logical name of the Memcached instance in the user project with the following restrictions: @@ -546,12 +559,15 @@ def create_instance( - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location + - Must be unique within the user project / location. + + If any of the above are not met, the API raises an + invalid argument error. This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.cloud_memcache.Instance`): + resource (google.cloud.memcache_v1beta2.types.Instance): Required. A Memcached [Instance] resource This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this @@ -564,11 +580,12 @@ def create_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -636,20 +653,22 @@ def update_instance( location. Args: - request (:class:`~.cloud_memcache.UpdateInstanceRequest`): + request (google.cloud.memcache_v1beta2.types.UpdateInstanceRequest): The request object. Request for [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. - ``displayName`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - resource (:class:`~.cloud_memcache.Instance`): + resource (google.cloud.memcache_v1beta2.types.Instance): Required. A Memcached [Instance] resource. Only fields specified in update_mask are updated. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -661,11 +680,12 @@ def update_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -730,30 +750,32 @@ def update_parameters( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Updates the defined Memcached Parameters for an - existing Instance. 
This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + r"""Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Args: - request (:class:`~.cloud_memcache.UpdateParametersRequest`): + request (google.cloud.memcache_v1beta2.types.UpdateParametersRequest): The request object. Request for [UpdateParameters][google.cloud.memcache.v1beta2.CloudMemcache.UpdateParameters]. - name (:class:`str`): + name (str): Required. Resource name of the Memcached instance for which the parameters should be updated. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - parameters (:class:`~.cloud_memcache.MemcacheParameters`): + parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): The parameters to apply to the instance. + This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -765,11 +787,12 @@ def update_parameters( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -835,13 +858,15 @@ def delete_instance( r"""Deletes a single Instance. Args: - request (:class:`~.cloud_memcache.DeleteInstanceRequest`): + request (google.cloud.memcache_v1beta2.types.DeleteInstanceRequest): The request object. Request for [DeleteInstance][google.cloud.memcache.v1beta2.CloudMemcache.DeleteInstance]. - name (:class:`str`): - Memcached instance resource name in the format: + name (str): + Required. Memcached instance resource name in the + format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -853,24 +878,22 @@ def delete_instance( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
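As the reworded summary above stresses, UpdateParameters only stages values; nothing reaches the nodes until ApplyParameters runs (a sketch of that second step follows the next hunk). A hedged sketch of the staging call, with placeholder resource names, mask path, and parameter keys::

    from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient
    from google.cloud.memcache_v1beta2.types import cloud_memcache
    from google.protobuf import field_mask_pb2

    client = CloudMemcacheClient()
    name = "projects/my-project/locations/us-central1/instances/my-memcached"

    operation = client.update_parameters(
        name=name,
        update_mask=field_mask_pb2.FieldMask(paths=["params"]),
        parameters=cloud_memcache.MemcacheParameters(
            params={"max-item-size": "2097152"}  # placeholder parameter
        ),
    )
    operation.result()  # the new values are now staged, not yet applied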
For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -931,33 +954,35 @@ def apply_parameters( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""ApplyParameters will update current set of Parameters - to the set of specified nodes of the Memcached Instance. + r"""``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Args: - request (:class:`~.cloud_memcache.ApplyParametersRequest`): + request (google.cloud.memcache_v1beta2.types.ApplyParametersRequest): The request object. Request for [ApplyParameters][google.cloud.memcache.v1beta2.CloudMemcache.ApplyParameters]. - name (:class:`str`): + name (str): Required. Resource name of the Memcached instance for which parameter group updates should be applied. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - node_ids (:class:`Sequence[str]`): - Nodes to which we should apply the - instance-level parameter group. + node_ids (Sequence[str]): + Nodes to which the instance-level + parameter group is applied. + This corresponds to the ``node_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - apply_all (:class:`bool`): - Whether to apply instance-level - parameter group to all nodes. If set to - true, will explicitly restrict users - from specifying any nodes, and apply - parameter group updates to all nodes - within the instance. + apply_all (bool): + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from + specifying individual nodes, and ``ApplyParameters`` + updates all nodes within the instance. + This corresponds to the ``apply_all`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -969,11 +994,12 @@ def apply_parameters( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be - :class:``~.cloud_memcache.Instance``: + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance """ # Create or coerce a protobuf request object. @@ -998,12 +1024,11 @@ def apply_parameters( if name is not None: request.name = name + if node_ids is not None: + request.node_ids = node_ids if apply_all is not None: request.apply_all = apply_all - if node_ids: - request.node_ids.extend(node_ids) - # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.apply_parameters] @@ -1028,6 +1053,118 @@ def apply_parameters( # Done; return the response. 
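ApplyParameters, as re-described above, restarts the selected nodes so they pick up the staged parameter group. A minimal sketch of both forms of the flattened call; the node IDs and instance name are placeholders::

    from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient

    client = CloudMemcacheClient()
    name = "projects/my-project/locations/us-central1/instances/my-memcached"

    # Apply the staged parameter group to two specific nodes ...
    client.apply_parameters(name=name, node_ids=["node-1", "node-2"]).result()

    # ... or restart every node in the instance instead.
    client.apply_parameters(name=name, apply_all=True).result()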
return response + def apply_software_update( + self, + request: cloud_memcache.ApplySoftwareUpdateRequest = None, + *, + instance: str = None, + node_ids: Sequence[str] = None, + apply_all: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates software on the selected nodes of the + Instance. + + Args: + request (google.cloud.memcache_v1beta2.types.ApplySoftwareUpdateRequest): + The request object. Request for + [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. + instance (str): + Required. Resource name of the + Memcached instance for which software + update should be applied. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_ids (Sequence[str]): + Nodes to which we should apply the + update to. Note all the selected nodes + are updated in parallel. + + This corresponds to the ``node_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + apply_all (bool): + Whether to apply the update to all + nodes. If set to true, will explicitly + restrict users from specifying any + nodes, and apply software update to all + nodes (where applicable) within the + instance. + + This corresponds to the ``apply_all`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memcache_v1beta2.types.Instance` A + Memorystore for Memcached instance + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, node_ids, apply_all]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_memcache.ApplySoftwareUpdateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_memcache.ApplySoftwareUpdateRequest): + request = cloud_memcache.ApplySoftwareUpdateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if instance is not None: + request.instance = instance + if node_ids is not None: + request.node_ids = node_ids + if apply_all is not None: + request.apply_all = apply_all + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.apply_software_update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("instance", request.instance),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_memcache.Instance, + metadata_type=cloud_memcache.OperationMetadata, + ) + + # Done; return the response. + return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py index 7e7696a..5b69afd 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.memcache_v1beta2.types import cloud_memcache @@ -24,7 +33,7 @@ class ListInstancesPager: """A pager for iterating through ``list_instances`` requests. This class thinly wraps an initial - :class:`~.cloud_memcache.ListInstancesResponse` object, and + :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` object, and provides an ``__iter__`` method to iterate through its ``resources`` field. @@ -33,7 +42,7 @@ class ListInstancesPager: through the ``resources`` field on the corresponding responses. - All the usual :class:`~.cloud_memcache.ListInstancesResponse` + All the usual :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.cloud_memcache.ListInstancesRequest`): + request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): The initial request object. - response (:class:`~.cloud_memcache.ListInstancesResponse`): + response (google.cloud.memcache_v1beta2.types.ListInstancesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListInstancesAsyncPager: """A pager for iterating through ``list_instances`` requests. This class thinly wraps an initial - :class:`~.cloud_memcache.ListInstancesResponse` object, and + :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` object, and provides an ``__aiter__`` method to iterate through its ``resources`` field. @@ -95,7 +104,7 @@ class ListInstancesAsyncPager: through the ``resources`` field on the corresponding responses. - All the usual :class:`~.cloud_memcache.ListInstancesResponse` + All the usual :class:`google.cloud.memcache_v1beta2.types.ListInstancesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.cloud_memcache.ListInstancesRequest`): + request (google.cloud.memcache_v1beta2.types.ListInstancesRequest): The initial request object. 
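The new apply_software_update method added above follows the same flattened-argument and long-running-operation pattern as the other mutating calls. A minimal sketch with a placeholder instance name::

    from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient

    client = CloudMemcacheClient()

    operation = client.apply_software_update(
        instance="projects/my-project/locations/us-central1/instances/my-memcached",
        apply_all=True,  # or node_ids=["node-1"] to update only selected nodes
    )
    updated = operation.result()  # resolves to the updated Instance
    print(updated.memcache_full_version)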
- response (:class:`~.cloud_memcache.ListInstancesResponse`): + response (google.cloud.memcache_v1beta2.types.ListInstancesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py index 3a96e70..2c598e4 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/base.py @@ -127,6 +127,11 @@ def _prep_wrapped_messages(self, client_info): self.apply_parameters: gapic_v1.method.wrap_method( self.apply_parameters, default_timeout=1200.0, client_info=client_info, ), + self.apply_software_update: gapic_v1.method.wrap_method( + self.apply_software_update, + default_timeout=1200.0, + client_info=client_info, + ), } @property @@ -202,5 +207,14 @@ def apply_parameters( ]: raise NotImplementedError() + @property + def apply_software_update( + self, + ) -> typing.Callable[ + [cloud_memcache.ApplySoftwareUpdateRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + __all__ = ("CloudMemcacheTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py index 09e1170..d1ffc02 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc.py @@ -52,8 +52,7 @@ class CloudMemcacheGrpcTransport(CloudMemcacheTransport): - As such, Memcached instances are resources of the form: ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note that location_id must be refering to a GCP ``region``; for - example: + Note that location_id must be a GCP ``region``; for example: - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` @@ -78,6 +77,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -108,6 +108,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -124,6 +128,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. 
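The ListInstancesPager documented in the pagers hunks above requests follow-up pages lazily, so callers can simply iterate over the return value of list_instances. A hedged sketch with a placeholder parent::

    from google.cloud.memcache_v1beta2.services.cloud_memcache import CloudMemcacheClient

    client = CloudMemcacheClient()

    pager = client.list_instances(parent="projects/my-project/locations/us-central1")
    for instance in pager:  # additional ListInstances calls happen behind the scenes
        print(instance.name, instance.state)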
@@ -133,11 +142,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -181,12 +185,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ @@ -281,7 +291,7 @@ def list_instances( ]: r"""Return a callable for the list instances method over gRPC. - Lists Instances in a given project and location. + Lists Instances in a given location. Returns: Callable[[~.ListInstancesRequest], @@ -333,8 +343,7 @@ def create_instance( ) -> Callable[[cloud_memcache.CreateInstanceRequest], operations.Operation]: r"""Return a callable for the create instance method over gRPC. - Creates a new Instance in a given project and - location. + Creates a new Instance in a given location. Returns: Callable[[~.CreateInstanceRequest], @@ -387,10 +396,10 @@ def update_parameters( ) -> Callable[[cloud_memcache.UpdateParametersRequest], operations.Operation]: r"""Return a callable for the update parameters method over gRPC. - Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Returns: Callable[[~.UpdateParametersRequest], @@ -442,8 +451,9 @@ def apply_parameters( ) -> Callable[[cloud_memcache.ApplyParametersRequest], operations.Operation]: r"""Return a callable for the apply parameters method over gRPC. - ApplyParameters will update current set of Parameters - to the set of specified nodes of the Memcached Instance. + ``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Returns: Callable[[~.ApplyParametersRequest], @@ -463,5 +473,32 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + @property + def apply_software_update( + self, + ) -> Callable[[cloud_memcache.ApplySoftwareUpdateRequest], operations.Operation]: + r"""Return a callable for the apply software update method over gRPC. + + Updates software on the selected nodes of the + Instance. + + Returns: + Callable[[~.ApplySoftwareUpdateRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "apply_software_update" not in self._stubs: + self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", + request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_software_update"] + __all__ = ("CloudMemcacheGrpcTransport",) diff --git a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py index 009acf5..24f0f89 100644 --- a/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py +++ b/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/grpc_asyncio.py @@ -54,8 +54,7 @@ class CloudMemcacheGrpcAsyncIOTransport(CloudMemcacheTransport): - As such, Memcached instances are resources of the form: ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note that location_id must be refering to a GCP ``region``; for - example: + Note that location_id must be a GCP ``region``; for example: - ``projects/my-memcached-project/locations/us-central1/instances/my-memcached`` @@ -122,6 +121,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -153,6 +153,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -169,6 +173,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -178,11 +187,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -226,12 +230,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. 
self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ @@ -288,7 +298,7 @@ def list_instances( ]: r"""Return a callable for the list instances method over gRPC. - Lists Instances in a given project and location. + Lists Instances in a given location. Returns: Callable[[~.ListInstancesRequest], @@ -344,8 +354,7 @@ def create_instance( ]: r"""Return a callable for the create instance method over gRPC. - Creates a new Instance in a given project and - location. + Creates a new Instance in a given location. Returns: Callable[[~.CreateInstanceRequest], @@ -402,10 +411,10 @@ def update_parameters( ]: r"""Return a callable for the update parameters method over gRPC. - Updates the defined Memcached Parameters for an - existing Instance. This method only stages the - parameters, it must be followed by ApplyParameters to - apply the parameters to nodes of the Memcached Instance. + Updates the defined Memcached parameters for an existing + instance. This method only stages the parameters, it must be + followed by ``ApplyParameters`` to apply the parameters to nodes + of the Memcached instance. Returns: Callable[[~.UpdateParametersRequest], @@ -461,8 +470,9 @@ def apply_parameters( ]: r"""Return a callable for the apply parameters method over gRPC. - ApplyParameters will update current set of Parameters - to the set of specified nodes of the Memcached Instance. + ``ApplyParameters`` restarts the set of specified nodes in order + to update them to the current set of parameters for the + Memcached Instance. Returns: Callable[[~.ApplyParametersRequest], @@ -482,5 +492,34 @@ def apply_parameters( ) return self._stubs["apply_parameters"] + @property + def apply_software_update( + self, + ) -> Callable[ + [cloud_memcache.ApplySoftwareUpdateRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the apply software update method over gRPC. + + Updates software on the selected nodes of the + Instance. + + Returns: + Callable[[~.ApplySoftwareUpdateRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "apply_software_update" not in self._stubs: + self._stubs["apply_software_update"] = self.grpc_channel.unary_unary( + "/google.cloud.memcache.v1beta2.CloudMemcache/ApplySoftwareUpdate", + request_serializer=cloud_memcache.ApplySoftwareUpdateRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["apply_software_update"] + __all__ = ("CloudMemcacheGrpcAsyncIOTransport",) diff --git a/google/cloud/memcache_v1beta2/types/__init__.py b/google/cloud/memcache_v1beta2/types/__init__.py index 8619a5a..9fa6727 100644 --- a/google/cloud/memcache_v1beta2/types/__init__.py +++ b/google/cloud/memcache_v1beta2/types/__init__.py @@ -25,6 +25,7 @@ DeleteInstanceRequest, ApplyParametersRequest, UpdateParametersRequest, + ApplySoftwareUpdateRequest, MemcacheParameters, OperationMetadata, LocationMetadata, @@ -42,6 +43,7 @@ "DeleteInstanceRequest", "ApplyParametersRequest", "UpdateParametersRequest", + "ApplySoftwareUpdateRequest", "MemcacheParameters", "OperationMetadata", "LocationMetadata", diff --git a/google/cloud/memcache_v1beta2/types/cloud_memcache.py b/google/cloud/memcache_v1beta2/types/cloud_memcache.py index 36cd39e..b7e9a43 100644 --- a/google/cloud/memcache_v1beta2/types/cloud_memcache.py +++ b/google/cloud/memcache_v1beta2/types/cloud_memcache.py @@ -35,6 +35,7 @@ "DeleteInstanceRequest", "ApplyParametersRequest", "UpdateParametersRequest", + "ApplySoftwareUpdateRequest", "MemcacheParameters", "OperationMetadata", "LocationMetadata", @@ -50,7 +51,7 @@ class MemcacheVersion(proto.Enum): class Instance(proto.Message): - r""" + r"""A Memorystore for Memcached instance Attributes: name (str): @@ -58,57 +59,59 @@ class Instance(proto.Message): including project and location using the form: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - Note: Memcached instances are managed and addressed at - regional level so location_id here refers to a GCP region; - however, users may choose which zones Memcached nodes within - an instances should be provisioned in. Refer to [zones] - field for more details. + Note: Memcached instances are managed and addressed at the + regional level so ``location_id`` here refers to a Google + Cloud region; however, users may choose which zones + Memcached nodes should be provisioned in within an instance. + Refer to + [zones][google.cloud.memcache.v1beta2.Instance.zones] field + for more details. display_name (str): - Optional. User provided name for the instance + User provided name for the instance, which is only used for display purposes. Cannot be more than 80 characters. - labels (Sequence[~.cloud_memcache.Instance.LabelsEntry]): - Optional. Resource labels to represent user- - rovided metadata. Refer to cloud documentation - on labels for more details. + labels (Sequence[google.cloud.memcache_v1beta2.types.Instance.LabelsEntry]): + Resource labels to represent user-provided + metadata. Refer to cloud documentation on labels + for more details. https://cloud.google.com/compute/docs/labeling- resources authorized_network (str): - Optional. The full name of the Google Compute Engine + The full name of the Google Compute Engine `network `__ to which the instance is connected. If left unspecified, the ``default`` network will be used. zones (Sequence[str]): - Optional. Zones where Memcached nodes should - be provisioned in. Memcached nodes will be - equally distributed across these zones. If not - provided, the service will by default create - nodes in all zones in the region for the - instance. 
+ Zones in which Memcached nodes should be + provisioned. Memcached nodes will be equally + distributed across these zones. If not provided, + the service will by default create nodes in all + zones in the region for the instance. node_count (int): Required. Number of nodes in the Memcached instance. - node_config (~.cloud_memcache.Instance.NodeConfig): + node_config (google.cloud.memcache_v1beta2.types.Instance.NodeConfig): Required. Configuration for Memcached nodes. - memcache_version (~.cloud_memcache.MemcacheVersion): - Optional. The major version of Memcached software. If not - provided, latest supported version will be used. Currently - the latest supported major version is MEMCACHE_1_5. The - minor version will be automatically determined by our system - based on the latest supported minor version. - parameters (~.cloud_memcache.MemcacheParameters): + memcache_version (google.cloud.memcache_v1beta2.types.MemcacheVersion): + The major version of Memcached software. If not provided, + latest supported version will be used. Currently the latest + supported major version is ``MEMCACHE_1_5``. The minor + version will be automatically determined by our system based + on the latest supported minor version. + parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): Optional: User defined parameters to apply to the memcached process on each node. - memcache_nodes (Sequence[~.cloud_memcache.Instance.Node]): - Output only. List of Memcached nodes. Refer to [Node] - message for more details. - create_time (~.timestamp.Timestamp): + memcache_nodes (Sequence[google.cloud.memcache_v1beta2.types.Instance.Node]): + Output only. List of Memcached nodes. Refer to + [Node][google.cloud.memcache.v1beta2.Instance.Node] message + for more details. + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the instance was created. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the instance was updated. - state (~.cloud_memcache.Instance.State): + state (google.cloud.memcache_v1beta2.types.Instance.State): Output only. The state of this Memcached instance. memcache_full_version (str): @@ -119,17 +122,18 @@ class Instance(proto.Message): MemcacheVersion. The full version format will be "memcached-1.5.16". - instance_messages (Sequence[~.cloud_memcache.Instance.InstanceMessage]): - List of messages that describe current - statuses of memcached instance. + instance_messages (Sequence[google.cloud.memcache_v1beta2.types.Instance.InstanceMessage]): + List of messages that describe the current + state of the Memcached instance. discovery_endpoint (str): - Output only. Endpoint for Discovery API + Output only. Endpoint for the Discovery API. + update_available (bool): + Output only. Returns true if there is an + update waiting to be applied """ class State(proto.Enum): - r"""Different states of a Memcached instance. - LINT.IfChange - """ + r"""Different states of a Memcached instance.""" STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -162,7 +166,7 @@ class Node(proto.Message): zone (str): Output only. Location (GCP Zone) for the Memcached node. - state (~.cloud_memcache.Instance.Node.State): + state (google.cloud.memcache_v1beta2.types.Instance.Node.State): Output only. Current state of the Memcached node. host (str): @@ -172,15 +176,16 @@ class Node(proto.Message): port (int): Output only. The port number of the Memcached server on this node. 
- parameters (~.cloud_memcache.MemcacheParameters): + parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): User defined parameters currently applied to the node. + update_available (bool): + Output only. Returns true if there is an + update waiting to be applied """ class State(proto.Enum): - r"""Different states of a Memcached node. - LINT.IfChange - """ + r"""Different states of a Memcached node.""" STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -199,11 +204,13 @@ class State(proto.Enum): parameters = proto.Field(proto.MESSAGE, number=6, message="MemcacheParameters",) + update_available = proto.Field(proto.BOOL, number=7) + class InstanceMessage(proto.Message): r""" Attributes: - code (~.cloud_memcache.Instance.InstanceMessage.Code): + code (google.cloud.memcache_v1beta2.types.Instance.InstanceMessage.Code): A code that correspond to one type of user- acing message. message (str): @@ -254,6 +261,8 @@ class Code(proto.Enum): discovery_endpoint = proto.Field(proto.STRING, number=20) + update_available = proto.Field(proto.BOOL, number=21) + class ListInstancesRequest(proto.Message): r"""Request for @@ -268,18 +277,18 @@ class ListInstancesRequest(proto.Message): The maximum number of items to return. If not specified, a default value of 1000 will be used by - the service. Regardless of the page_size value, the response - may include a partial list and a caller should only rely on - response's - [next_page_token][CloudMemcache.ListInstancesResponse.next_page_token] + the service. Regardless of the ``page_size`` value, the + response may include a partial list and a caller should only + rely on response's + [``next_page_token``][google.cloud.memcache.v1beta2.ListInstancesResponse.next_page_token] to determine if there are more instances left to be queried. page_token (str): - The next_page_token value returned from a previous List + The ``next_page_token`` value returned from a previous List request, if any. filter (str): - List filter. For example, exclude all - Memcached instances with name as my-instance by - specifying "name != my-instance". + List filter. For example, exclude all Memcached instances + with name as my-instance by specifying + ``"name != my-instance"``. order_by (str): Sort results. Supported values are "name", "name desc" or "" (unsorted). @@ -301,7 +310,7 @@ class ListInstancesResponse(proto.Message): [ListInstances][google.cloud.memcache.v1beta2.CloudMemcache.ListInstances]. Attributes: - resources (Sequence[~.cloud_memcache.Instance]): + resources (Sequence[google.cloud.memcache_v1beta2.types.Instance]): A list of Memcached instances in the project in the specified location, or across all locations. @@ -359,8 +368,11 @@ class CreateInstanceRequest(proto.Message): - Must start with a letter. - Must be between 1-40 characters. - Must end with a number or a letter. - - Must be unique within the user project / location - resource (~.cloud_memcache.Instance): + - Must be unique within the user project / location. + + If any of the above are not met, the API raises an invalid + argument error. + resource (google.cloud.memcache_v1beta2.types.Instance): Required. A Memcached [Instance] resource """ @@ -376,11 +388,11 @@ class UpdateInstanceRequest(proto.Message): [UpdateInstance][google.cloud.memcache.v1beta2.CloudMemcache.UpdateInstance]. Attributes: - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. 
- ``displayName`` - resource (~.cloud_memcache.Instance): + resource (google.cloud.memcache_v1beta2.types.Instance): Required. A Memcached [Instance] resource. Only fields specified in update_mask are updated. """ @@ -396,7 +408,7 @@ class DeleteInstanceRequest(proto.Message): Attributes: name (str): - Memcached instance resource name in the format: + Required. Memcached instance resource name in the format: ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` where ``location_id`` refers to a GCP region """ @@ -414,14 +426,13 @@ class ApplyParametersRequest(proto.Message): instance for which parameter group updates should be applied. node_ids (Sequence[str]): - Nodes to which we should apply the instance- - evel parameter group. + Nodes to which the instance-level parameter + group is applied. apply_all (bool): - Whether to apply instance-level parameter - group to all nodes. If set to true, will - explicitly restrict users from specifying any - nodes, and apply parameter group updates to all - nodes within the instance. + Whether to apply instance-level parameter group to all + nodes. If set to true, users are restricted from specifying + individual nodes, and ``ApplyParameters`` updates all nodes + within the instance. """ name = proto.Field(proto.STRING, number=1) @@ -440,9 +451,9 @@ class UpdateParametersRequest(proto.Message): Required. Resource name of the Memcached instance for which the parameters should be updated. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Mask of fields to update. - parameters (~.cloud_memcache.MemcacheParameters): + parameters (google.cloud.memcache_v1beta2.types.MemcacheParameters): The parameters to apply to the instance. """ @@ -453,18 +464,45 @@ class UpdateParametersRequest(proto.Message): parameters = proto.Field(proto.MESSAGE, number=3, message="MemcacheParameters",) +class ApplySoftwareUpdateRequest(proto.Message): + r"""Request for + [ApplySoftwareUpdate][google.cloud.memcache.v1beta2.CloudMemcache.ApplySoftwareUpdate]. + + Attributes: + instance (str): + Required. Resource name of the Memcached + instance for which software update should be + applied. + node_ids (Sequence[str]): + Nodes to which we should apply the update to. + Note all the selected nodes are updated in + parallel. + apply_all (bool): + Whether to apply the update to all nodes. If + set to true, will explicitly restrict users from + specifying any nodes, and apply software update + to all nodes (where applicable) within the + instance. + """ + + instance = proto.Field(proto.STRING, number=1) + + node_ids = proto.RepeatedField(proto.STRING, number=2) + + apply_all = proto.Field(proto.BOOL, number=3) + + class MemcacheParameters(proto.Message): - r""" + r"""The unique ID associated with this set of parameters. Users + can use this id to determine if the parameters associated with + the instance differ from the parameters associated with the + nodes. A discrepancy between parameter ids can inform users that + they may need to take action to apply parameters on nodes. Attributes: id (str): - Output only. The unique ID associated with - this set of parameters. Users can use this id to - determine if the parameters associated with the - instance differ from the parameters associated - with the nodes and any action needs to be taken - to apply parameters on nodes. - params (Sequence[~.cloud_memcache.MemcacheParameters.ParamsEntry]): + Output only. 
+ params (Sequence[google.cloud.memcache_v1beta2.types.MemcacheParameters.ParamsEntry]): User defined set of parameters to use in the memcached process. """ @@ -478,26 +516,30 @@ class OperationMetadata(proto.Message): r"""Represents the metadata of a long-running operation. Attributes: - create_time (~.timestamp.Timestamp): - Time when the operation was created. - end_time (~.timestamp.Timestamp): - Time when the operation finished running. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the operation finished + running. target (str): - Server-defined resource path for the target - of the operation. + Output only. Server-defined resource path for + the target of the operation. verb (str): - Name of the verb executed by the operation. + Output only. Name of the verb executed by the + operation. status_detail (str): - Human-readable status of the operation, if - any. + Output only. Human-readable status of the + operation, if any. cancel_requested (bool): - Identifies whether the user has requested cancellation of - the operation. Operations that have successfully been - cancelled have [Operation.error][] value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to ``Code.CANCELLED``. + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. api_version (str): - API version used to start the operation. + Output only. API version used to start the + operation. """ create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) @@ -520,7 +562,7 @@ class LocationMetadata(proto.Message): [google.cloud.location.Location][google.cloud.location.Location]. Attributes: - available_zones (Sequence[~.cloud_memcache.LocationMetadata.AvailableZonesEntry]): + available_zones (Sequence[google.cloud.memcache_v1beta2.types.LocationMetadata.AvailableZonesEntry]): Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by GCE. These keys can be specified in the ``zones`` field when diff --git a/noxfile.py b/noxfile.py index 8004482..9e90799 100644 --- a/noxfile.py +++ b/noxfile.py @@ -30,6 +30,17 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -75,6 +86,7 @@ def default(session): session.install( "mock", "pytest", "pytest-cov", ) + session.install("-e", ".") # Run py.test against the unit tests. diff --git a/scripts/fixup_memcache_v1_keywords.py b/scripts/fixup_memcache_v1_keywords.py new file mode 100644 index 0000000..eac442a --- /dev/null +++ b/scripts/fixup_memcache_v1_keywords.py @@ -0,0 +1,185 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class memcacheCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('update_mask', 'instance', ), + 'update_parameters': ('name', 'update_mask', 'parameters', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=memcacheCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
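The leave_Call transform above folds positional and field keyword arguments into a single dict passed as ``request``, leaving the retry/timeout/metadata control parameters untouched. A hedged before/after illustration for one of the mapped methods (the resource name is a placeholder and ``client`` stands for any CloudMemcacheClient instance in user code)::

    # Before running the fixup script:
    client.get_instance(
        "projects/my-project/locations/us-central1/instances/my-memcached"
    )

    # After (written to the output directory; existing sources are not modified):
    client.get_instance(
        request={
            "name": "projects/my-project/locations/us-central1/instances/my-memcached"
        }
    )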
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the memcache client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/scripts/fixup_memcache_v1beta2_keywords.py b/scripts/fixup_memcache_v1beta2_keywords.py index 459ba63..4267b6f 100644 --- a/scripts/fixup_memcache_v1beta2_keywords.py +++ b/scripts/fixup_memcache_v1beta2_keywords.py @@ -42,6 +42,7 @@ class memcacheCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'apply_parameters': ('name', 'node_ids', 'apply_all', ), + 'apply_software_update': ('instance', 'node_ids', 'apply_all', ), 'create_instance': ('parent', 'instance_id', 'resource', ), 'delete_instance': ('name', ), 'get_instance': ('name', ), diff --git a/synth.py b/synth.py index 4b2ffa1..65a7ca3 100644 --- a/synth.py +++ b/synth.py @@ -22,7 +22,9 @@ gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -versions = ["v1beta2"] +versions = ["v1beta2", + "v1", + ] # add new versions at the end of the list # ---------------------------------------------------------------------------- # Generate memcache GAPIC layer diff --git a/tests/unit/gapic/memcache_v1/__init__.py b/tests/unit/gapic/memcache_v1/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/unit/gapic/memcache_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py new file mode 100644 index 0000000..896e3b1 --- /dev/null +++ b/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -0,0 +1,2575 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheAsyncClient +from google.cloud.memcache_v1.services.cloud_memcache import CloudMemcacheClient +from google.cloud.memcache_v1.services.cloud_memcache import pagers +from google.cloud.memcache_v1.services.cloud_memcache import transports +from google.cloud.memcache_v1.types import cloud_memcache +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudMemcacheClient._get_default_mtls_endpoint(None) is None + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudMemcacheClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test_cloud_memcache_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = CloudMemcacheClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "memcache.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] +) +def test_cloud_memcache_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + + assert client.transport._host == "memcache.googleapis.com:443" + + +def test_cloud_memcache_client_get_transport_class(): + transport = CloudMemcacheClient.get_transport_class() + available_transports = [ + transports.CloudMemcacheGrpcTransport, + ] + assert transport in available_transports + + transport = CloudMemcacheClient.get_transport_class("grpc") + assert transport == transports.CloudMemcacheGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + CloudMemcacheClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheClient), +) +@mock.patch.object( + CloudMemcacheAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheAsyncClient), +) +def test_cloud_memcache_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(CloudMemcacheClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "true"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc", "false"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + CloudMemcacheClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheClient), +) +@mock.patch.object( + CloudMemcacheAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudMemcacheAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_memcache_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
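+    # No explicit client_cert_source is supplied here, so when certificates are
+    # enabled the client should fall back to the default ADC client certificate
+    # source, which is mocked below.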
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_memcache_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudMemcacheClient, transports.CloudMemcacheGrpcTransport, "grpc"), + ( + CloudMemcacheAsyncClient, + transports.CloudMemcacheGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cloud_memcache_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
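+    # The credentials file path is forwarded to the transport unchanged; the
+    # transport constructor is mocked, so the file does not need to exist.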
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_memcache_client_client_options_from_dict(): + with mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudMemcacheClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_instances( + transport: str = "grpc", request_type=cloud_memcache.ListInstancesRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.ListInstancesResponse( + next_page_token="next_page_token_value", unreachable=["unreachable_value"], + ) + + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListInstancesPager) + + assert response.next_page_token == "next_page_token_value" + + assert response.unreachable == ["unreachable_value"] + + +def test_list_instances_from_dict(): + test_list_instances(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.ListInstancesRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ListInstancesRequest() + + # Establish that the response is the type that we expect. 
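+    # The async surface wraps the response in an AsyncPager that is consumed
+    # with async-for iteration.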
+ assert isinstance(response, pagers.ListInstancesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = cloud_memcache.ListInstancesResponse() + + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ListInstancesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse() + ) + + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_instances_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.ListInstancesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_instances_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + cloud_memcache.ListInstancesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
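+    # Patching __call__ on the multicallable type intercepts the outgoing RPC
+    # while the surrounding client logic (request merging, metadata) still runs.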
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.ListInstancesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instances( + cloud_memcache.ListInstancesRequest(), parent="parent_value", + ) + + +def test_list_instances_pager(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instances(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, cloud_memcache.Instance) for i in results) + + +def test_list_instances_pages(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
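+    # An AsyncMock lets each queued page in side_effect be awaited in turn; the
+    # trailing RuntimeError guards against the pager fetching more pages than
+    # were queued.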
+ with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_memcache.Instance) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_memcache.ListInstancesResponse( + instances=[ + cloud_memcache.Instance(), + cloud_memcache.Instance(), + cloud_memcache.Instance(), + ], + next_page_token="abc", + ), + cloud_memcache.ListInstancesResponse(instances=[], next_page_token="def",), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(),], next_page_token="ghi", + ), + cloud_memcache.ListInstancesResponse( + instances=[cloud_memcache.Instance(), cloud_memcache.Instance(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_instances(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_instance( + transport: str = "grpc", request_type=cloud_memcache.GetInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance( + name="name_value", + display_name="display_name_value", + authorized_network="authorized_network_value", + zones=["zones_value"], + node_count=1070, + memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, + state=cloud_memcache.Instance.State.CREATING, + memcache_full_version="memcache_full_version_value", + discovery_endpoint="discovery_endpoint_value", + ) + + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + # Establish that the response is the type that we expect. 
+ + assert isinstance(response, cloud_memcache.Instance) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.authorized_network == "authorized_network_value" + + assert response.zones == ["zones_value"] + + assert response.node_count == 1070 + + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + + assert response.state == cloud_memcache.Instance.State.CREATING + + assert response.memcache_full_version == "memcache_full_version_value" + + assert response.discovery_endpoint == "discovery_endpoint_value" + + +def test_get_instance_from_dict(): + test_get_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.GetInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance( + name="name_value", + display_name="display_name_value", + authorized_network="authorized_network_value", + zones=["zones_value"], + node_count=1070, + memcache_version=cloud_memcache.MemcacheVersion.MEMCACHE_1_5, + state=cloud_memcache.Instance.State.CREATING, + memcache_full_version="memcache_full_version_value", + discovery_endpoint="discovery_endpoint_value", + ) + ) + + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_memcache.Instance) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.authorized_network == "authorized_network_value" + + assert response.zones == ["zones_value"] + + assert response.node_count == 1070 + + assert response.memcache_version == cloud_memcache.MemcacheVersion.MEMCACHE_1_5 + + assert response.state == cloud_memcache.Instance.State.CREATING + + assert response.memcache_full_version == "memcache_full_version_value" + + assert response.discovery_endpoint == "discovery_endpoint_value" + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = cloud_memcache.Instance() + + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
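+        # The request object, including the name set above, should reach the
+        # stub unmodified.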
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.GetInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance() + ) + + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + cloud_memcache.GetInstanceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_memcache.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_memcache.Instance() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance( + cloud_memcache.GetInstanceRequest(), name="name_value", + ) + + +def test_create_instance( + transport: str = "grpc", request_type=cloud_memcache.CreateInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_from_dict(): + test_create_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.CreateInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
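+    # Routing metadata is emitted as an x-goog-request-params entry built from
+    # the request's parent field.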
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.CreateInstanceRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].instance == cloud_memcache.Instance(name="name_value") + + assert args[0].instance_id == "instance_id_value" + + +def test_create_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + cloud_memcache.CreateInstanceRequest(), + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_instance( + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].instance == cloud_memcache.Instance(name="name_value") + + assert args[0].instance_id == "instance_id_value" + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + cloud_memcache.CreateInstanceRequest(), + parent="parent_value", + instance=cloud_memcache.Instance(name="name_value"), + instance_id="instance_id_value", + ) + + +def test_update_instance( + transport: str = "grpc", request_type=cloud_memcache.UpdateInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_from_dict(): + test_update_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateInstanceRequest() + request.instance.name = "instance.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
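+    # For nested fields the routing key should use the dotted field path, so the
+    # expected header value is "instance.name=instance.name/value".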
+ with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateInstanceRequest() + request.instance.name = "instance.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[ + "metadata" + ] + + +def test_update_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].instance == cloud_memcache.Instance(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + cloud_memcache.UpdateInstanceRequest(), + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + # Designate an appropriate return value for the call. 
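+        # The plain Operation assigned first is immediately replaced by the
+        # awaitable FakeUnaryUnaryCall below; only the latter is awaited by the
+        # async client.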
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_instance( + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].instance == cloud_memcache.Instance(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_instance( + cloud_memcache.UpdateInstanceRequest(), + instance=cloud_memcache.Instance(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_update_parameters( + transport: str = "grpc", request_type=cloud_memcache.UpdateParametersRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_parameters_from_dict(): + test_update_parameters(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_parameters_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.UpdateParametersRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.UpdateParametersRequest() + + # Establish that the response is the type that we expect. 
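+    # Long-running methods surface an operation future rather than the finished
+    # resource.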
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_parameters_async_from_dict(): + await test_update_parameters_async(request_type=dict) + + +def test_update_parameters_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_parameters_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.UpdateParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.update_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_parameters_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_parameters( + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + + +def test_update_parameters_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
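+    # Flattened keyword arguments are folded into a fresh request object, so
+    # combining them with an explicit request is ambiguous and should raise
+    # ValueError.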
+ with pytest.raises(ValueError): + client.update_parameters( + cloud_memcache.UpdateParametersRequest(), + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + +@pytest.mark.asyncio +async def test_update_parameters_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_parameters), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_parameters( + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + assert args[0].parameters == cloud_memcache.MemcacheParameters(id="id_value") + + +@pytest.mark.asyncio +async def test_update_parameters_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_parameters( + cloud_memcache.UpdateParametersRequest(), + name="name_value", + update_mask=field_mask.FieldMask(paths=["paths_value"]), + parameters=cloud_memcache.MemcacheParameters(id="id_value"), + ) + + +def test_delete_instance( + transport: str = "grpc", request_type=cloud_memcache.DeleteInstanceRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_instance_from_dict(): + test_delete_instance(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.DeleteInstanceRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.DeleteInstanceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_instance_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_instance_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + cloud_memcache.DeleteInstanceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_instance( + cloud_memcache.DeleteInstanceRequest(), name="name_value", + ) + + +def test_apply_parameters( + transport: str = "grpc", request_type=cloud_memcache.ApplyParametersRequest +): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_parameters_from_dict(): + test_apply_parameters(request_type=dict) + + +@pytest.mark.asyncio +async def test_apply_parameters_async( + transport: str = "grpc_asyncio", request_type=cloud_memcache.ApplyParametersRequest +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplyParametersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_parameters_async_from_dict(): + await test_apply_parameters_async(request_type=dict) + + +def test_apply_parameters_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_apply_parameters_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplyParametersRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.apply_parameters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_apply_parameters_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.apply_parameters( + name="name_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +def test_apply_parameters_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.apply_parameters( + cloud_memcache.ApplyParametersRequest(), + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +@pytest.mark.asyncio +async def test_apply_parameters_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.apply_parameters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.apply_parameters( + name="name_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +@pytest.mark.asyncio +async def test_apply_parameters_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.apply_parameters( + cloud_memcache.ApplyParametersRequest(), + name="name_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudMemcacheClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = CloudMemcacheClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudMemcacheGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudMemcacheGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.CloudMemcacheGrpcTransport,) + + +def test_cloud_memcache_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.CloudMemcacheTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_memcache_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudMemcacheTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "update_parameters", + "delete_instance", + "apply_parameters", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +def test_cloud_memcache_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_memcache_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.memcache_v1.services.cloud_memcache.transports.CloudMemcacheTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudMemcacheTransport() + adc.assert_called_once() + + +def test_cloud_memcache_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + CloudMemcacheClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_cloud_memcache_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cloud_memcache_host_no_port(): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="memcache.googleapis.com" + ), + ) + assert client.transport._host == "memcache.googleapis.com:443" + + +def test_cloud_memcache_host_with_port(): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="memcache.googleapis.com:8000" + ), + ) + assert client.transport._host == "memcache.googleapis.com:8000" + + +def test_cloud_memcache_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudMemcacheGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_memcache_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudMemcacheGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cloud_memcache_grpc_lro_client(): + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cloud_memcache_grpc_lro_async_client(): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_instance_path(): + project = "squid" + location = "clam" + instance = "whelk" + + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, location=location, instance=instance, + ) + actual = CloudMemcacheClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = CloudMemcacheClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_instance_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudMemcacheClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudMemcacheClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudMemcacheClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + + expected = "folders/{folder}".format(folder=folder,) + actual = CloudMemcacheClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudMemcacheClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + + expected = "organizations/{organization}".format(organization=organization,) + actual = CloudMemcacheClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudMemcacheClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + + expected = "projects/{project}".format(project=project,) + actual = CloudMemcacheClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudMemcacheClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudMemcacheClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = CloudMemcacheClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudMemcacheClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudMemcacheClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudMemcacheTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudMemcacheTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudMemcacheClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 3050c18..0671fea 100644 --- a/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -92,8 +92,21 @@ def test__get_default_mtls_endpoint(): ) +def test_cloud_memcache_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = CloudMemcacheClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "memcache.googleapis.com:443" + + @pytest.mark.parametrize( - "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient] + "client_class", [CloudMemcacheClient, CloudMemcacheAsyncClient,] ) def test_cloud_memcache_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -112,7 +125,10 @@ def test_cloud_memcache_client_from_service_account_file(client_class): def test_cloud_memcache_client_get_transport_class(): transport = CloudMemcacheClient.get_transport_class() - assert transport == transports.CloudMemcacheGrpcTransport + available_transports = [ + transports.CloudMemcacheGrpcTransport, + ] + assert transport in available_transports transport = CloudMemcacheClient.get_transport_class("grpc") assert transport == transports.CloudMemcacheGrpcTransport @@ -163,7 +179,7 @@ def test_cloud_memcache_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -179,7 +195,7 @@ def test_cloud_memcache_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -195,7 +211,7 @@ def test_cloud_memcache_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -223,7 +239,7 @@ def test_cloud_memcache_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -274,29 +290,25 @@ def test_cloud_memcache_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with 
mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -305,66 +317,53 @@ def test_cloud_memcache_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -390,7 +389,7 @@ def test_cloud_memcache_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -420,7 +419,7 @@ def test_cloud_memcache_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -439,7 +438,7 @@ def test_cloud_memcache_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -807,6 +806,7 @@ def test_get_instance( state=cloud_memcache.Instance.State.CREATING, memcache_full_version="memcache_full_version_value", discovery_endpoint="discovery_endpoint_value", + update_available=True, ) response = client.get_instance(request) @@ -839,6 +839,8 @@ def test_get_instance( assert response.discovery_endpoint == "discovery_endpoint_value" + assert response.update_available is True + def test_get_instance_from_dict(): test_get_instance(request_type=dict) @@ -870,6 +872,7 @@ async def test_get_instance_async( state=cloud_memcache.Instance.State.CREATING, memcache_full_version="memcache_full_version_value", discovery_endpoint="discovery_endpoint_value", + update_available=True, ) ) @@ -902,6 +905,8 @@ async def test_get_instance_async( assert response.discovery_endpoint == "discovery_endpoint_value" + assert response.update_available is True + @pytest.mark.asyncio async def test_get_instance_async_from_dict(): @@ -2048,6 +2053,223 @@ async def test_apply_parameters_flattened_error_async(): ) +def test_apply_software_update( + transport: str = "grpc", request_type=cloud_memcache.ApplySoftwareUpdateRequest +): + client = 
CloudMemcacheClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.apply_software_update(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_software_update_from_dict(): + test_apply_software_update(request_type=dict) + + +@pytest.mark.asyncio +async def test_apply_software_update_async( + transport: str = "grpc_asyncio", + request_type=cloud_memcache.ApplySoftwareUpdateRequest, +): + client = CloudMemcacheAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.apply_software_update(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == cloud_memcache.ApplySoftwareUpdateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_software_update_async_from_dict(): + await test_apply_software_update_async(request_type=dict) + + +def test_apply_software_update_field_headers(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_memcache.ApplySoftwareUpdateRequest() + request.instance = "instance/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.apply_software_update(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "instance=instance/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_apply_software_update_field_headers_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = cloud_memcache.ApplySoftwareUpdateRequest() + request.instance = "instance/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.apply_software_update(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "instance=instance/value",) in kw["metadata"] + + +def test_apply_software_update_flattened(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.apply_software_update( + instance="instance_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].instance == "instance_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +def test_apply_software_update_flattened_error(): + client = CloudMemcacheClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.apply_software_update( + cloud_memcache.ApplySoftwareUpdateRequest(), + instance="instance_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + +@pytest.mark.asyncio +async def test_apply_software_update_flattened_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_software_update), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.apply_software_update( + instance="instance_value", node_ids=["node_ids_value"], apply_all=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].instance == "instance_value" + + assert args[0].node_ids == ["node_ids_value"] + + assert args[0].apply_all == True + + +@pytest.mark.asyncio +async def test_apply_software_update_flattened_error_async(): + client = CloudMemcacheAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.apply_software_update( + cloud_memcache.ApplySoftwareUpdateRequest(), + instance="instance_value", + node_ids=["node_ids_value"], + apply_all=True, + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudMemcacheGrpcTransport( @@ -2152,6 +2374,7 @@ def test_cloud_memcache_base_transport(): "update_parameters", "delete_instance", "apply_parameters", + "apply_software_update", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2218,6 +2441,51 @@ def test_cloud_memcache_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudMemcacheGrpcTransport, + transports.CloudMemcacheGrpcAsyncIOTransport, + ], +) +def test_cloud_memcache_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_cloud_memcache_host_no_port(): client = CloudMemcacheClient( credentials=credentials.AnonymousCredentials(), @@ -2239,7 +2507,7 @@ def test_cloud_memcache_host_with_port(): def test_cloud_memcache_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.CloudMemcacheGrpcTransport( @@ -2251,7 +2519,7 @@ def test_cloud_memcache_grpc_transport_channel(): def test_cloud_memcache_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.CloudMemcacheGrpcAsyncIOTransport( @@ -2262,6 +2530,8 @@ def test_cloud_memcache_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -2274,7 +2544,7 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2312,6 +2582,8 @@ def test_cloud_memcache_transport_channel_mtls_with_client_cert_source(transport assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2327,7 +2599,7 @@ def test_cloud_memcache_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel