diff --git a/.coveragerc b/.coveragerc
index 0d8e6297..fff276ec 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -23,16 +23,14 @@ omit =
 [report]
 fail_under = 100
 show_missing = True
+omit = google/cloud/language/__init__.py
 exclude_lines =
     # Re-enable the standard pragma
     pragma: NO COVER
     # Ignore debug-only repr
     def __repr__
-    # Ignore abstract methods
-    raise NotImplementedError
-omit =
-    */gapic/*.py
-    */proto/*.py
-    */core/*.py
-    */site-packages/*.py
-    google/cloud/__init__.py
+    # Ignore pkg_resources exceptions.
+    # This is added at the module level as a safeguard for if someone
+    # generates the code and tries to run it without pip installing. This
+    # makes it virtually impossible to test properly.
+    except pkg_resources.DistributionNotFound
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index e70b6034..aa92ff73 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.6"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py36"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-language/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 4d745031..aa64611c 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.7"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py37"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-language/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index bf242e12..8fd22150 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.8"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py38"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-language/.kokoro/test-samples.sh"
diff --git a/README.rst b/README.rst
index 48558dd7..96dd41fc 100644
--- a/README.rst
+++ b/README.rst
@@ -62,11 +62,13 @@ dependencies.
 Supported Python Versions
 ^^^^^^^^^^^^^^^^^^^^^^^^^
-Python >= 3.5
+Python >= 3.6
 
-Deprecated Python Versions
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python == 2.7.
+
+The last version of this library compatible with Python 2.7 is google-cloud-language==1.3.0.
 
 Mac/Linux
diff --git a/UPGRADING.md b/UPGRADING.md
new file mode 100644
index 00000000..61fdb3f6
--- /dev/null
+++ b/UPGRADING.md
@@ -0,0 +1,140 @@
+# 2.0.0 Migration Guide
+
+The 2.0 release of the `google-cloud-language` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-language/issues).
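Before working through the changes below, it can help to confirm which release is actually installed in your environment. A minimal, illustrative check (not part of this diff), using the same `pkg_resources` API the generated clients themselves rely on; `pip show google-cloud-language` works equally well:

```py
import pkg_resources

# Anything below 2.0.0 still uses the old positional-argument surface
# shown in the "Before" examples in this guide.
print(pkg_resources.get_distribution("google-cloud-language").version)
```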
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+The 2.0.0 release requires Python 3.6+.
+
+## Method Calls
+
+> **WARNING**: Breaking change
+Methods expect request objects. We provide a script that will convert most common use cases.
+
+* Install the library:
+
+```sh
+python3 -m pip install google-cloud-language
+```
+
+* The script `fixup_language_v1_keywords.py` is shipped with the library. It expects
+an input directory (with the code to convert) and an empty destination directory.
+
+```sh
+$ fixup_language_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+**Before:**
+```py
+from google.cloud import language_v1
+language = language_v1.LanguageServiceClient()
+return language.analyze_sentiment(document=document).document_sentiment
+```
+
+**After:**
+```py
+from google.cloud import language_v1
+language = language_v1.LanguageServiceClient()
+return language.analyze_sentiment(request={'document': document}).document_sentiment
+```
+
+### More Details
+
+In `google-cloud-language<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+    def analyze_sentiment(
+        self,
+        document,
+        encoding_type=None,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword-only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer.
+
+**After:**
+```py
+    def analyze_sentiment(
+        self,
+        request: language_service.AnalyzeSentimentRequest = None,
+        *,
+        document: language_service.Document = None,
+        encoding_type: language_service.EncodingType = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> language_service.AnalyzeSentimentResponse:
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+
+Both of these calls are valid:
+
+```py
+response = client.analyze_sentiment(
+    request={
+        "document": document,
+        "encoding_type": encoding_type
+    }
+)
+```
+
+```py
+response = client.analyze_sentiment(
+    document=document,
+    encoding_type=encoding_type
+)  # Make an API request.
+```
+
+This call is invalid because it mixes `request` with the keyword argument `encoding_type`. Executing this code
+will result in an error.
+
+```py
+response = client.analyze_sentiment(
+    request={
+        "document": document
+    },
+    encoding_type=encoding_type
+)
+```
+
+## Enums and Types
+
+> **WARNING**: Breaking change
+The submodules `enums` and `types` have been removed.
+
+**Before:**
+```py
+from google.cloud import language_v1
+document = language_v1.types.Document(content=text, type=language_v1.enums.Document.Type.PLAIN_TEXT)
+encoding_type = language_v1.enums.EncodingType.UTF8
+```
+
+**After:**
+```py
+from google.cloud import language_v1
+document = language_v1.Document(content=text, type_=language_v1.Document.Type.PLAIN_TEXT)
+encoding_type = language_v1.EncodingType.UTF8
+```
+
+## Project Path Helper Methods
+
+The project path helper method `project_path` has been removed. Please construct
+this path manually.
+
+```py
+project = 'my-project'
+project_path = f'projects/{project}'
+```
\ No newline at end of file
diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md
new file mode 120000
index 00000000..01097c8c
--- /dev/null
+++ b/docs/UPGRADING.md
@@ -0,0 +1 @@
+../UPGRADING.md
\ No newline at end of file
diff --git a/docs/api.rst b/docs/api.rst
index 4d714615..8720e9fa 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -7,8 +7,8 @@ API. By default, you will get ``v1``, the latest GA version.
 .. toctree::
     :maxdepth: 2
 
-    gapic/v1/api
-    gapic/v1/types
+    language_v1/services
+    language_v1/types
 
 If you are interested in beta features ahead of the latest GA, you may
 opt-in to the v1.1 beta, which is spelled ``v1beta2``. In order to do this,
 An API and type reference is provided for the v1.1 beta also:
 
 .. toctree::
     :maxdepth: 2
 
-    gapic/v1beta2/api
-    gapic/v1beta2/types
+    language_v1beta2/services
+    language_v1beta2/types
+
+Migration Guide
+---------------
+
+See the guide below for instructions on migrating to the 2.x release of this library.
+
+.. toctree::
+    :maxdepth: 2
+
+    UPGRADING
 
 .. note::
diff --git a/docs/conf.py b/docs/conf.py
index d23820ed..33d16cf7 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -347,7 +347,7 @@ intersphinx_mapping = {
     "python": ("http://python.readthedocs.org/en/latest/", None),
     "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
-    "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
+    "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
     "grpc": ("https://grpc.io/grpc/python/", None),
 }
diff --git a/docs/gapic/v1/api.rst b/docs/gapic/v1/api.rst
deleted file mode 100644
index 2c5fd4fd..00000000
--- a/docs/gapic/v1/api.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Natural Language Client API
-===========================
-
-.. automodule:: google.cloud.language_v1
-    :members:
-    :inherited-members:
diff --git a/docs/gapic/v1/types.rst b/docs/gapic/v1/types.rst
deleted file mode 100644
index 90d27a4b..00000000
--- a/docs/gapic/v1/types.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Natural Language Client Types
-=============================
-
-.. automodule:: google.cloud.language_v1.types
-    :members:
diff --git a/docs/gapic/v1beta2/api.rst b/docs/gapic/v1beta2/api.rst
deleted file mode 100644
index 330d7e6e..00000000
--- a/docs/gapic/v1beta2/api.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Natural Language Beta Client API
-================================
-
-.. automodule:: google.cloud.language_v1beta2
-    :members:
-    :inherited-members:
diff --git a/docs/gapic/v1beta2/types.rst b/docs/gapic/v1beta2/types.rst
deleted file mode 100644
index d9a7eb17..00000000
--- a/docs/gapic/v1beta2/types.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Natural Language Beta Client Types
-==================================
-
-.. automodule:: google.cloud.language_v1beta2.types
-    :members:
diff --git a/docs/language_v1/services.rst b/docs/language_v1/services.rst
new file mode 100644
index 00000000..e1af1f07
--- /dev/null
+++ b/docs/language_v1/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Language v1 API
+=========================================
+
+.. automodule:: google.cloud.language_v1.services.language_service
+    :members:
+    :inherited-members:
diff --git a/docs/language_v1/types.rst b/docs/language_v1/types.rst
new file mode 100644
index 00000000..befde156
--- /dev/null
+++ b/docs/language_v1/types.rst
@@ -0,0 +1,5 @@
+Types for Google Cloud Language v1 API
+======================================
+
+..
automodule:: google.cloud.language_v1.types + :members: diff --git a/docs/language_v1beta2/services.rst b/docs/language_v1beta2/services.rst new file mode 100644 index 00000000..275e2e7c --- /dev/null +++ b/docs/language_v1beta2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Language v1beta2 API +============================================== + +.. automodule:: google.cloud.language_v1beta2.services.language_service + :members: + :inherited-members: diff --git a/docs/language_v1beta2/types.rst b/docs/language_v1beta2/types.rst new file mode 100644 index 00000000..5a1c2284 --- /dev/null +++ b/docs/language_v1beta2/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Language v1beta2 API +=========================================== + +.. automodule:: google.cloud.language_v1beta2.types + :members: diff --git a/google/cloud/language/__init__.py b/google/cloud/language/__init__.py new file mode 100644 index 00000000..4426b53c --- /dev/null +++ b/google/cloud/language/__init__.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.language_v1.services.language_service.async_client import ( + LanguageServiceAsyncClient, +) +from google.cloud.language_v1.services.language_service.client import ( + LanguageServiceClient, +) +from google.cloud.language_v1.types.language_service import AnalyzeEntitiesRequest +from google.cloud.language_v1.types.language_service import AnalyzeEntitiesResponse +from google.cloud.language_v1.types.language_service import ( + AnalyzeEntitySentimentRequest, +) +from google.cloud.language_v1.types.language_service import ( + AnalyzeEntitySentimentResponse, +) +from google.cloud.language_v1.types.language_service import AnalyzeSentimentRequest +from google.cloud.language_v1.types.language_service import AnalyzeSentimentResponse +from google.cloud.language_v1.types.language_service import AnalyzeSyntaxRequest +from google.cloud.language_v1.types.language_service import AnalyzeSyntaxResponse +from google.cloud.language_v1.types.language_service import AnnotateTextRequest +from google.cloud.language_v1.types.language_service import AnnotateTextResponse +from google.cloud.language_v1.types.language_service import ClassificationCategory +from google.cloud.language_v1.types.language_service import ClassifyTextRequest +from google.cloud.language_v1.types.language_service import ClassifyTextResponse +from google.cloud.language_v1.types.language_service import DependencyEdge +from google.cloud.language_v1.types.language_service import Document +from google.cloud.language_v1.types.language_service import EncodingType +from google.cloud.language_v1.types.language_service import Entity +from google.cloud.language_v1.types.language_service import EntityMention +from google.cloud.language_v1.types.language_service import PartOfSpeech +from google.cloud.language_v1.types.language_service import Sentence +from google.cloud.language_v1.types.language_service import Sentiment +from 
google.cloud.language_v1.types.language_service import TextSpan +from google.cloud.language_v1.types.language_service import Token + +__all__ = ( + "AnalyzeEntitiesRequest", + "AnalyzeEntitiesResponse", + "AnalyzeEntitySentimentRequest", + "AnalyzeEntitySentimentResponse", + "AnalyzeSentimentRequest", + "AnalyzeSentimentResponse", + "AnalyzeSyntaxRequest", + "AnalyzeSyntaxResponse", + "AnnotateTextRequest", + "AnnotateTextResponse", + "ClassificationCategory", + "ClassifyTextRequest", + "ClassifyTextResponse", + "DependencyEdge", + "Document", + "EncodingType", + "Entity", + "EntityMention", + "LanguageServiceAsyncClient", + "LanguageServiceClient", + "PartOfSpeech", + "Sentence", + "Sentiment", + "TextSpan", + "Token", +) diff --git a/google/cloud/language/py.typed b/google/cloud/language/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/google/cloud/language/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/google/cloud/language_v1/__init__.py b/google/cloud/language_v1/__init__.py index a44fe4c9..ba3826be 100644 --- a/google/cloud/language_v1/__init__.py +++ b/google/cloud/language_v1/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2017, Google LLC All rights reserved. +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,17 +13,57 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -from __future__ import absolute_import - -from google.cloud.language_v1 import types -from google.cloud.language_v1.gapic import enums -from google.cloud.language_v1.gapic import language_service_client - - -class LanguageServiceClient(language_service_client.LanguageServiceClient): - __doc__ = language_service_client.LanguageServiceClient.__doc__ - enums = enums +from .services.language_service import LanguageServiceClient +from .types.language_service import AnalyzeEntitiesRequest +from .types.language_service import AnalyzeEntitiesResponse +from .types.language_service import AnalyzeEntitySentimentRequest +from .types.language_service import AnalyzeEntitySentimentResponse +from .types.language_service import AnalyzeSentimentRequest +from .types.language_service import AnalyzeSentimentResponse +from .types.language_service import AnalyzeSyntaxRequest +from .types.language_service import AnalyzeSyntaxResponse +from .types.language_service import AnnotateTextRequest +from .types.language_service import AnnotateTextResponse +from .types.language_service import ClassificationCategory +from .types.language_service import ClassifyTextRequest +from .types.language_service import ClassifyTextResponse +from .types.language_service import DependencyEdge +from .types.language_service import Document +from .types.language_service import EncodingType +from .types.language_service import Entity +from .types.language_service import EntityMention +from .types.language_service import PartOfSpeech +from .types.language_service import Sentence +from .types.language_service import Sentiment +from .types.language_service import TextSpan +from .types.language_service import Token -__all__ = ("enums", "types", "LanguageServiceClient") +__all__ = ( + "AnalyzeEntitiesRequest", + "AnalyzeEntitiesResponse", + "AnalyzeEntitySentimentRequest", + 
"AnalyzeEntitySentimentResponse", + "AnalyzeSentimentRequest", + "AnalyzeSentimentResponse", + "AnalyzeSyntaxRequest", + "AnalyzeSyntaxResponse", + "AnnotateTextRequest", + "AnnotateTextResponse", + "ClassificationCategory", + "ClassifyTextRequest", + "ClassifyTextResponse", + "DependencyEdge", + "Document", + "EncodingType", + "Entity", + "EntityMention", + "PartOfSpeech", + "Sentence", + "Sentiment", + "TextSpan", + "Token", + "LanguageServiceClient", +) diff --git a/google/cloud/language_v1/py.typed b/google/cloud/language_v1/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/google/cloud/language_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/tests/unit/__init__.py b/google/cloud/language_v1/services/__init__.py similarity index 90% rename from tests/unit/__init__.py rename to google/cloud/language_v1/services/__init__.py index df379f1e..42ffdf2b 100644 --- a/tests/unit/__init__.py +++ b/google/cloud/language_v1/services/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,3 +13,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# diff --git a/google/cloud/__init__.py b/google/cloud/language_v1/services/language_service/__init__.py similarity index 70% rename from google/cloud/__init__.py rename to google/cloud/language_v1/services/language_service/__init__.py index 0e1bc513..d2aff222 100644 --- a/google/cloud/__init__.py +++ b/google/cloud/language_v1/services/language_service/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,12 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil +from .client import LanguageServiceClient +from .async_client import LanguageServiceAsyncClient - __path__ = pkgutil.extend_path(__path__, __name__) +__all__ = ( + "LanguageServiceClient", + "LanguageServiceAsyncClient", +) diff --git a/google/cloud/language_v1/services/language_service/async_client.py b/google/cloud/language_v1/services/language_service/async_client.py new file mode 100644 index 00000000..f7a214e9 --- /dev/null +++ b/google/cloud/language_v1/services/language_service/async_client.py @@ -0,0 +1,602 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.language_v1.types import language_service + +from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .client import LanguageServiceClient + + +class LanguageServiceAsyncClient: + """Provides text analysis operations such as sentiment analysis + and entity recognition. + """ + + _client: LanguageServiceClient + + DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT + + from_service_account_file = LanguageServiceClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, LanguageServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + + self._client = LanguageServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def analyze_sentiment( + self, + request: language_service.AnalyzeSentimentRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + Args: + request (:class:`~.language_service.AnalyzeSentimentRequest`): + The request object. The sentiment analysis request + message. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate sentence offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnalyzeSentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_sentiment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def analyze_entities( + self, + request: language_service.AnalyzeEntitiesRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Args: + request (:class:`~.language_service.AnalyzeEntitiesRequest`): + The request object. The entity analysis request message. + document (:class:`~.language_service.Document`): + Input document. 
+ This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnalyzeEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entities, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def analyze_entity_sentiment( + self, + request: language_service.AnalyzeEntitySentimentRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Args: + request (:class:`~.language_service.AnalyzeEntitySentimentRequest`): + The request object. The entity-level sentiment analysis + request message. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnalyzeEntitySentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entity_sentiment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def analyze_syntax( + self, + request: language_service.AnalyzeSyntaxRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Args: + request (:class:`~.language_service.AnalyzeSyntaxRequest`): + The request object. The syntax analysis request message. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnalyzeSyntaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_syntax, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def classify_text( + self, + request: language_service.ClassifyTextRequest = None, + *, + document: language_service.Document = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + Args: + request (:class:`~.language_service.ClassifyTextRequest`): + The request object. The document classification request + message. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.ClassifyTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.classify_text, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def annotate_text( + self, + request: language_service.AnnotateTextRequest = None, + *, + document: language_service.Document = None, + features: language_service.AnnotateTextRequest.Features = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. 
+ + Args: + request (:class:`~.language_service.AnnotateTextRequest`): + The request object. The request message for the text + annotation API, which can perform multiple analysis + types (sentiment, entities, and syntax) in one call. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`~.language_service.AnnotateTextRequest.Features`): + The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, features, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnnotateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_text, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-language",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("LanguageServiceAsyncClient",) diff --git a/google/cloud/language_v1/services/language_service/client.py b/google/cloud/language_v1/services/language_service/client.py new file mode 100644 index 00000000..1084acd3 --- /dev/null +++ b/google/cloud/language_v1/services/language_service/client.py @@ -0,0 +1,714 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.language_v1.types import language_service
+
+from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import LanguageServiceGrpcTransport
+from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
+
+
+class LanguageServiceClientMeta(type):
+    """Metaclass for the LanguageService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[LanguageServiceTransport]]
+    _transport_registry["grpc"] = LanguageServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls, label: str = None,) -> Type[LanguageServiceTransport]:
+        """Return an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
+    """Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "language.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            LanguageServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, LanguageServiceTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the language service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.LanguageServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (client_options_lib.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = client_options_lib.from_dict(client_options)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+
+        # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LanguageServiceTransport): + # transport is a LanguageServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def analyze_sentiment( + self, + request: language_service.AnalyzeSentimentRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + Args: + request (:class:`~.language_service.AnalyzeSentimentRequest`): + The request object. The sentiment analysis request + message. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate sentence offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSentimentRequest): + request = language_service.AnalyzeSentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def analyze_entities( + self, + request: language_service.AnalyzeEntitiesRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Args: + request (:class:`~.language_service.AnalyzeEntitiesRequest`): + The request object. The entity analysis request message. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, language_service.AnalyzeEntitiesRequest): + request = language_service.AnalyzeEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entities] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def analyze_entity_sentiment( + self, + request: language_service.AnalyzeEntitySentimentRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Args: + request (:class:`~.language_service.AnalyzeEntitySentimentRequest`): + The request object. The entity-level sentiment analysis + request message. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitySentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): + request = language_service.AnalyzeEntitySentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def analyze_syntax( + self, + request: language_service.AnalyzeSyntaxRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Args: + request (:class:`~.language_service.AnalyzeSyntaxRequest`): + The request object. The syntax analysis request message. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSyntaxRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSyntaxRequest): + request = language_service.AnalyzeSyntaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def classify_text( + self, + request: language_service.ClassifyTextRequest = None, + *, + document: language_service.Document = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + Args: + request (:class:`~.language_service.ClassifyTextRequest`): + The request object. The document classification request + message. + document (:class:`~.language_service.Document`): + Input document. 
+ This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ClassifyTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ClassifyTextRequest): + request = language_service.ClassifyTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.classify_text] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def annotate_text( + self, + request: language_service.AnnotateTextRequest = None, + *, + document: language_service.Document = None, + features: language_service.AnnotateTextRequest.Features = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + Args: + request (:class:`~.language_service.AnnotateTextRequest`): + The request object. The request message for the text + annotation API, which can perform multiple analysis + types (sentiment, entities, and syntax) in one call. + document (:class:`~.language_service.Document`): + Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`~.language_service.AnnotateTextRequest.Features`): + The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, features, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnnotateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnnotateTextRequest): + request = language_service.AnnotateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_text] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-language",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("LanguageServiceClient",) diff --git a/google/cloud/language_v1/services/language_service/transports/__init__.py b/google/cloud/language_v1/services/language_service/transports/__init__.py new file mode 100644 index 00000000..22069335 --- /dev/null +++ b/google/cloud/language_v1/services/language_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import LanguageServiceTransport +from .grpc import LanguageServiceGrpcTransport +from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
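+# The registry maps the transport name accepted by the client's ``transport``
+# argument (e.g. "grpc" or "grpc_asyncio") to the class implementing it.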
+_transport_registry = OrderedDict()  # type: Dict[str, Type[LanguageServiceTransport]]
+_transport_registry["grpc"] = LanguageServiceGrpcTransport
+_transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
+
+
+__all__ = (
+    "LanguageServiceTransport",
+    "LanguageServiceGrpcTransport",
+    "LanguageServiceGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/language_v1/services/language_service/transports/base.py b/google/cloud/language_v1/services/language_service/transports/base.py
new file mode 100644
index 00000000..79ed44e8
--- /dev/null
+++ b/google/cloud/language_v1/services/language_service/transports/base.py
@@ -0,0 +1,263 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.language_v1.types import language_service
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-language",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class LanguageServiceTransport(abc.ABC):
+    """Abstract transport class for LanguageService."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-language",
+        "https://www.googleapis.com/auth/cloud-platform",
+    )
+
+    def __init__(
+        self,
+        *,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname.
Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.analyze_sentiment: gapic_v1.method.wrap_method( + self.analyze_sentiment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.analyze_entities: gapic_v1.method.wrap_method( + self.analyze_entities, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.analyze_entity_sentiment: gapic_v1.method.wrap_method( + self.analyze_entity_sentiment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.analyze_syntax: gapic_v1.method.wrap_method( + self.analyze_syntax, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.classify_text: gapic_v1.method.wrap_method( + self.classify_text, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.annotate_text: gapic_v1.method.wrap_method( + self.annotate_text, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + } + + @property + def analyze_sentiment( + self, + ) -> typing.Callable[ + [language_service.AnalyzeSentimentRequest], + typing.Union[ + language_service.AnalyzeSentimentResponse, + typing.Awaitable[language_service.AnalyzeSentimentResponse], + ], + ]: + raise NotImplementedError() + + @property + def analyze_entities( + self, + ) -> typing.Callable[ + [language_service.AnalyzeEntitiesRequest], + typing.Union[ + language_service.AnalyzeEntitiesResponse, + typing.Awaitable[language_service.AnalyzeEntitiesResponse], + ], + ]: + raise NotImplementedError() + + @property + def analyze_entity_sentiment( + self, + ) -> typing.Callable[ + 
[language_service.AnalyzeEntitySentimentRequest], + typing.Union[ + language_service.AnalyzeEntitySentimentResponse, + typing.Awaitable[language_service.AnalyzeEntitySentimentResponse], + ], + ]: + raise NotImplementedError() + + @property + def analyze_syntax( + self, + ) -> typing.Callable[ + [language_service.AnalyzeSyntaxRequest], + typing.Union[ + language_service.AnalyzeSyntaxResponse, + typing.Awaitable[language_service.AnalyzeSyntaxResponse], + ], + ]: + raise NotImplementedError() + + @property + def classify_text( + self, + ) -> typing.Callable[ + [language_service.ClassifyTextRequest], + typing.Union[ + language_service.ClassifyTextResponse, + typing.Awaitable[language_service.ClassifyTextResponse], + ], + ]: + raise NotImplementedError() + + @property + def annotate_text( + self, + ) -> typing.Callable[ + [language_service.AnnotateTextRequest], + typing.Union[ + language_service.AnnotateTextResponse, + typing.Awaitable[language_service.AnnotateTextResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("LanguageServiceTransport",) diff --git a/google/cloud/language_v1/services/language_service/transports/grpc.py b/google/cloud/language_v1/services/language_service/transports/grpc.py new file mode 100644 index 00000000..73608a10 --- /dev/null +++ b/google/cloud/language_v1/services/language_service/transports/grpc.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.language_v1.types import language_service + +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO + + +class LanguageServiceGrpcTransport(LanguageServiceTransport): + """gRPC backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
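+            # The SSL credentials derived above (from client_cert_source or
+            # application default credentials) are applied to this channel.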
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        self._stubs = {}  # type: Dict[str, Callable]
+
+        # Run the base constructor.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def analyze_sentiment(
+        self,
+    ) -> Callable[
+        [language_service.AnalyzeSentimentRequest],
+        language_service.AnalyzeSentimentResponse,
+    ]:
+        r"""Return a callable for the analyze sentiment method over gRPC.
+
+        Analyzes the sentiment of the provided text.
+
+        Returns:
+            Callable[[~.AnalyzeSentimentRequest],
+                    ~.AnalyzeSentimentResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
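+        # The stub is created lazily on first access and cached in
+        # self._stubs, so the serializer hooks are built only once per
+        # transport instance.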
+ if "analyze_sentiment" not in self._stubs: + self._stubs["analyze_sentiment"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnalyzeSentiment", + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs["analyze_sentiment"] + + @property + def analyze_entities( + self, + ) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse, + ]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + ~.AnalyzeEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_entities" not in self._stubs: + self._stubs["analyze_entities"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnalyzeEntities", + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs["analyze_entities"] + + @property + def analyze_entity_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse, + ]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + ~.AnalyzeEntitySentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_entity_sentiment" not in self._stubs: + self._stubs["analyze_entity_sentiment"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment", + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs["analyze_entity_sentiment"] + + @property + def analyze_syntax( + self, + ) -> Callable[ + [language_service.AnalyzeSyntaxRequest], language_service.AnalyzeSyntaxResponse + ]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + ~.AnalyzeSyntaxResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "analyze_syntax" not in self._stubs: + self._stubs["analyze_syntax"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnalyzeSyntax", + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs["analyze_syntax"] + + @property + def classify_text( + self, + ) -> Callable[ + [language_service.ClassifyTextRequest], language_service.ClassifyTextResponse + ]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + ~.ClassifyTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "classify_text" not in self._stubs: + self._stubs["classify_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/ClassifyText", + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs["classify_text"] + + @property + def annotate_text( + self, + ) -> Callable[ + [language_service.AnnotateTextRequest], language_service.AnnotateTextResponse + ]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + Returns: + Callable[[~.AnnotateTextRequest], + ~.AnnotateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_text" not in self._stubs: + self._stubs["annotate_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnnotateText", + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs["annotate_text"] + + +__all__ = ("LanguageServiceGrpcTransport",) diff --git a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..b55e8c8b --- /dev/null +++ b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py @@ -0,0 +1,418 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.language_v1.types import language_service
+
+from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import LanguageServiceGrpcTransport
+
+
+class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport):
+    """gRPC AsyncIO backend transport for LanguageService.
+
+    Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
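+            # Any ssl_channel_credentials supplied by the caller are forwarded
+            # here, so a custom grpc.ChannelCredentials can be used without
+            # the deprecated mTLS arguments.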
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def analyze_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Awaitable[language_service.AnalyzeSentimentResponse], + ]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + Awaitable[~.AnalyzeSentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_sentiment" not in self._stubs: + self._stubs["analyze_sentiment"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnalyzeSentiment", + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs["analyze_sentiment"] + + @property + def analyze_entities( + self, + ) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Awaitable[language_service.AnalyzeEntitiesResponse], + ]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + Awaitable[~.AnalyzeEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_entities" not in self._stubs: + self._stubs["analyze_entities"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnalyzeEntities", + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs["analyze_entities"] + + @property + def analyze_entity_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Awaitable[language_service.AnalyzeEntitySentimentResponse], + ]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. 
+ + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + Awaitable[~.AnalyzeEntitySentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_entity_sentiment" not in self._stubs: + self._stubs["analyze_entity_sentiment"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment", + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs["analyze_entity_sentiment"] + + @property + def analyze_syntax( + self, + ) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Awaitable[language_service.AnalyzeSyntaxResponse], + ]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part of speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + Awaitable[~.AnalyzeSyntaxResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_syntax" not in self._stubs: + self._stubs["analyze_syntax"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnalyzeSyntax", + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs["analyze_syntax"] + + @property + def classify_text( + self, + ) -> Callable[ + [language_service.ClassifyTextRequest], + Awaitable[language_service.ClassifyTextResponse], + ]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + Awaitable[~.ClassifyTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "classify_text" not in self._stubs: + self._stubs["classify_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/ClassifyText", + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs["classify_text"] + + @property + def annotate_text( + self, + ) -> Callable[ + [language_service.AnnotateTextRequest], + Awaitable[language_service.AnnotateTextResponse], + ]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all the features + that analyzeSentiment, analyzeEntities, and + analyzeSyntax provide in one call. + + Returns: + Callable[[~.AnnotateTextRequest], + Awaitable[~.AnnotateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_text" not in self._stubs: + self._stubs["annotate_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1.LanguageService/AnnotateText", + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs["annotate_text"] + + +__all__ = ("LanguageServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/language_v1/types/__init__.py b/google/cloud/language_v1/types/__init__.py new file mode 100644 index 00000000..f44df83e --- /dev/null +++ b/google/cloud/language_v1/types/__init__.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .language_service import ( + Document, + Sentence, + Entity, + Token, + Sentiment, + PartOfSpeech, + DependencyEdge, + EntityMention, + TextSpan, + ClassificationCategory, + AnalyzeSentimentRequest, + AnalyzeSentimentResponse, + AnalyzeEntitySentimentRequest, + AnalyzeEntitySentimentResponse, + AnalyzeEntitiesRequest, + AnalyzeEntitiesResponse, + AnalyzeSyntaxRequest, + AnalyzeSyntaxResponse, + ClassifyTextRequest, + ClassifyTextResponse, + AnnotateTextRequest, + AnnotateTextResponse, +) + + +__all__ = ( + "Document", + "Sentence", + "Entity", + "Token", + "Sentiment", + "PartOfSpeech", + "DependencyEdge", + "EntityMention", + "TextSpan", + "ClassificationCategory", + "AnalyzeSentimentRequest", + "AnalyzeSentimentResponse", + "AnalyzeEntitySentimentRequest", + "AnalyzeEntitySentimentResponse", + "AnalyzeEntitiesRequest", + "AnalyzeEntitiesResponse", + "AnalyzeSyntaxRequest", + "AnalyzeSyntaxResponse", + "ClassifyTextRequest", + "ClassifyTextResponse", + "AnnotateTextRequest", + "AnnotateTextResponse", +) diff --git a/google/cloud/language_v1/types/language_service.py b/google/cloud/language_v1/types/language_service.py new file mode 100644 index 00000000..10664a54 --- /dev/null +++ b/google/cloud/language_v1/types/language_service.py @@ -0,0 +1,879 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import proto  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.cloud.language.v1",
+    manifest={
+        "EncodingType",
+        "Document",
+        "Sentence",
+        "Entity",
+        "Token",
+        "Sentiment",
+        "PartOfSpeech",
+        "DependencyEdge",
+        "EntityMention",
+        "TextSpan",
+        "ClassificationCategory",
+        "AnalyzeSentimentRequest",
+        "AnalyzeSentimentResponse",
+        "AnalyzeEntitySentimentRequest",
+        "AnalyzeEntitySentimentResponse",
+        "AnalyzeEntitiesRequest",
+        "AnalyzeEntitiesResponse",
+        "AnalyzeSyntaxRequest",
+        "AnalyzeSyntaxResponse",
+        "ClassifyTextRequest",
+        "ClassifyTextResponse",
+        "AnnotateTextRequest",
+        "AnnotateTextResponse",
+    },
+)
+
+
+class EncodingType(proto.Enum):
+    r"""Represents the text encoding that the caller uses to process the
+    output. Providing an ``EncodingType`` is recommended because the API
+    provides the beginning offsets for various outputs, such as tokens
+    and mentions, and languages that natively use different text
+    encodings may access offsets differently.
+    """
+    NONE = 0
+    UTF8 = 1
+    UTF16 = 2
+    UTF32 = 3
+
+
+class Document(proto.Message):
+    r"""Represents the input to API methods.
+
+    Attributes:
+        type_ (~.language_service.Document.Type):
+            Required. If the type is not set or is ``TYPE_UNSPECIFIED``,
+            returns an ``INVALID_ARGUMENT`` error.
+        content (str):
+            The content of the input in string format.
+            Cloud audit logging exempt since it is based on
+            user data.
+        gcs_content_uri (str):
+            The Google Cloud Storage URI where the file content is
+            located. This URI must be of the form:
+            gs://bucket_name/object_name. For more details, see
+            https://cloud.google.com/storage/docs/reference-uris. NOTE:
+            Cloud Storage object versioning is not supported.
+        language (str):
+            The language of the document (if not specified, the language
+            is automatically detected). Both ISO and BCP-47 language
+            codes are accepted. `Language
+            Support <https://cloud.google.com/natural-language/docs/languages>`__
+            lists currently supported languages for each API method. If
+            the language (either specified by the caller or
+            automatically detected) is not supported by the called API
+            method, an ``INVALID_ARGUMENT`` error is returned.
+    """
+
+    class Type(proto.Enum):
+        r"""The document types enum."""
+        TYPE_UNSPECIFIED = 0
+        PLAIN_TEXT = 1
+        HTML = 2
+
+    type_ = proto.Field(proto.ENUM, number=1, enum=Type,)
+
+    content = proto.Field(proto.STRING, number=2, oneof="source")
+
+    gcs_content_uri = proto.Field(proto.STRING, number=3, oneof="source")
+
+    language = proto.Field(proto.STRING, number=4)
+
+
+class Sentence(proto.Message):
+    r"""Represents a sentence in the input document.
+
+    Attributes:
+        text (~.language_service.TextSpan):
+            The sentence text.
+        sentiment (~.language_service.Sentiment):
+            For calls to [AnalyzeSentiment][] or if
+            [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]
+            is set to true, this field will contain the sentiment for
+            the sentence.
+    """
+
+    text = proto.Field(proto.MESSAGE, number=1, message="TextSpan",)
+
+    sentiment = proto.Field(proto.MESSAGE, number=2, message="Sentiment",)
+
+
+class Entity(proto.Message):
+    r"""Represents a phrase in the text that is a known entity, such
+    as a person, an organization, or location. The API associates
+    information, such as salience and mentions, with entities.
+
+    Attributes:
+        name (str):
+            The representative name for the entity.
+        type_ (~.language_service.Entity.Type):
+            The entity type.
+        metadata (Sequence[~.language_service.Entity.MetadataEntry]):
+            Metadata associated with the entity.
+
+            For most entity types, the metadata is a Wikipedia URL
+            (``wikipedia_url``) and Knowledge Graph MID (``mid``), if
+            they are available. For the metadata associated with other
+            entity types, see the Type table below.
+        salience (float):
+            The salience score associated with the entity in the [0,
+            1.0] range.
+
+            The salience score for an entity provides information about
+            the importance or centrality of that entity to the entire
+            document text. Scores closer to 0 are less salient, while
+            scores closer to 1.0 are highly salient.
+        mentions (Sequence[~.language_service.EntityMention]):
+            The mentions of this entity in the input
+            document. The API currently supports proper noun
+            mentions.
+        sentiment (~.language_service.Sentiment):
+            For calls to [AnalyzeEntitySentiment][] or if
+            [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment]
+            is set to true, this field will contain the aggregate
+            sentiment expressed for this entity in the provided
+            document.
+    """
+
+    class Type(proto.Enum):
+        r"""The type of the entity. For most entity types, the associated
+        metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph
+        MID (``mid``). The table below lists the associated fields for
+        entities that have different metadata.
+        """
+        UNKNOWN = 0
+        PERSON = 1
+        LOCATION = 2
+        ORGANIZATION = 3
+        EVENT = 4
+        WORK_OF_ART = 5
+        CONSUMER_GOOD = 6
+        OTHER = 7
+        PHONE_NUMBER = 9
+        ADDRESS = 10
+        DATE = 11
+        NUMBER = 12
+        PRICE = 13
+
+    name = proto.Field(proto.STRING, number=1)
+
+    type_ = proto.Field(proto.ENUM, number=2, enum=Type,)
+
+    metadata = proto.MapField(proto.STRING, proto.STRING, number=3)
+
+    salience = proto.Field(proto.FLOAT, number=4)
+
+    mentions = proto.RepeatedField(proto.MESSAGE, number=5, message="EntityMention",)
+
+    sentiment = proto.Field(proto.MESSAGE, number=6, message="Sentiment",)
+
+
+class Token(proto.Message):
+    r"""Represents the smallest syntactic building block of the text.
+
+    Attributes:
+        text (~.language_service.TextSpan):
+            The token text.
+        part_of_speech (~.language_service.PartOfSpeech):
+            Parts of speech tag for this token.
+        dependency_edge (~.language_service.DependencyEdge):
+            Dependency tree parse for this token.
+        lemma (str):
+            `Lemma <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__
+            of the token.
+    """
+
+    text = proto.Field(proto.MESSAGE, number=1, message="TextSpan",)
+
+    part_of_speech = proto.Field(proto.MESSAGE, number=2, message="PartOfSpeech",)
+
+    dependency_edge = proto.Field(proto.MESSAGE, number=3, message="DependencyEdge",)
+
+    lemma = proto.Field(proto.STRING, number=4)
+
+
+class Sentiment(proto.Message):
+    r"""Represents the feeling associated with the entire text or
+    entities in the text.
+
+    Attributes:
+        magnitude (float):
+            A non-negative number in the [0, +inf) range, which
+            represents the absolute magnitude of sentiment regardless of
+            score (positive or negative).
+        score (float):
+            Sentiment score between -1.0 (negative
+            sentiment) and 1.0 (positive sentiment).
+    """
+
+    magnitude = proto.Field(proto.FLOAT, number=2)
+
+    score = proto.Field(proto.FLOAT, number=3)
+
+
+class PartOfSpeech(proto.Message):
+    r"""Represents part of speech information for a token. Parts of speech
+    are as defined in
+    http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf
+
+    Attributes:
+        tag (~.language_service.PartOfSpeech.Tag):
+            The part of speech tag.
+ aspect (~.language_service.PartOfSpeech.Aspect): + The grammatical aspect. + case (~.language_service.PartOfSpeech.Case): + The grammatical case. + form (~.language_service.PartOfSpeech.Form): + The grammatical form. + gender (~.language_service.PartOfSpeech.Gender): + The grammatical gender. + mood (~.language_service.PartOfSpeech.Mood): + The grammatical mood. + number (~.language_service.PartOfSpeech.Number): + The grammatical number. + person (~.language_service.PartOfSpeech.Person): + The grammatical person. + proper (~.language_service.PartOfSpeech.Proper): + The grammatical properness. + reciprocity (~.language_service.PartOfSpeech.Reciprocity): + The grammatical reciprocity. + tense (~.language_service.PartOfSpeech.Tense): + The grammatical tense. + voice (~.language_service.PartOfSpeech.Voice): + The grammatical voice. + """ + + class Tag(proto.Enum): + r"""The part of speech tags enum.""" + UNKNOWN = 0 + ADJ = 1 + ADP = 2 + ADV = 3 + CONJ = 4 + DET = 5 + NOUN = 6 + NUM = 7 + PRON = 8 + PRT = 9 + PUNCT = 10 + VERB = 11 + X = 12 + AFFIX = 13 + + class Aspect(proto.Enum): + r"""The characteristic of a verb that expresses time flow during + an event. + """ + ASPECT_UNKNOWN = 0 + PERFECTIVE = 1 + IMPERFECTIVE = 2 + PROGRESSIVE = 3 + + class Case(proto.Enum): + r"""The grammatical function performed by a noun or pronoun in a + phrase, clause, or sentence. In some languages, other parts of + speech, such as adjective and determiner, take case inflection + in agreement with the noun. + """ + CASE_UNKNOWN = 0 + ACCUSATIVE = 1 + ADVERBIAL = 2 + COMPLEMENTIVE = 3 + DATIVE = 4 + GENITIVE = 5 + INSTRUMENTAL = 6 + LOCATIVE = 7 + NOMINATIVE = 8 + OBLIQUE = 9 + PARTITIVE = 10 + PREPOSITIONAL = 11 + REFLEXIVE_CASE = 12 + RELATIVE_CASE = 13 + VOCATIVE = 14 + + class Form(proto.Enum): + r"""Depending on the language, Form can be categorizing different + forms of verbs, adjectives, adverbs, etc. For example, + categorizing inflected endings of verbs and adjectives or + distinguishing between short and long forms of adjectives and + participles + """ + FORM_UNKNOWN = 0 + ADNOMIAL = 1 + AUXILIARY = 2 + COMPLEMENTIZER = 3 + FINAL_ENDING = 4 + GERUND = 5 + REALIS = 6 + IRREALIS = 7 + SHORT = 8 + LONG = 9 + ORDER = 10 + SPECIFIC = 11 + + class Gender(proto.Enum): + r"""Gender classes of nouns reflected in the behaviour of + associated words. + """ + GENDER_UNKNOWN = 0 + FEMININE = 1 + MASCULINE = 2 + NEUTER = 3 + + class Mood(proto.Enum): + r"""The grammatical feature of verbs, used for showing modality + and attitude. + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(proto.Enum): + r"""Count distinctions.""" + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(proto.Enum): + r"""The distinction between the speaker, second person, third + person, etc. 
+ """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(proto.Enum): + r"""This category shows if the token is part of a proper name.""" + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(proto.Enum): + r"""Reciprocal features of a pronoun.""" + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(proto.Enum): + r"""Time reference.""" + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(proto.Enum): + r"""The relationship between the action that a verb expresses and + the participants identified by its arguments. + """ + VOICE_UNKNOWN = 0 + ACTIVE = 1 + CAUSATIVE = 2 + PASSIVE = 3 + + tag = proto.Field(proto.ENUM, number=1, enum=Tag,) + + aspect = proto.Field(proto.ENUM, number=2, enum=Aspect,) + + case = proto.Field(proto.ENUM, number=3, enum=Case,) + + form = proto.Field(proto.ENUM, number=4, enum=Form,) + + gender = proto.Field(proto.ENUM, number=5, enum=Gender,) + + mood = proto.Field(proto.ENUM, number=6, enum=Mood,) + + number = proto.Field(proto.ENUM, number=7, enum=Number,) + + person = proto.Field(proto.ENUM, number=8, enum=Person,) + + proper = proto.Field(proto.ENUM, number=9, enum=Proper,) + + reciprocity = proto.Field(proto.ENUM, number=10, enum=Reciprocity,) + + tense = proto.Field(proto.ENUM, number=11, enum=Tense,) + + voice = proto.Field(proto.ENUM, number=12, enum=Voice,) + + +class DependencyEdge(proto.Message): + r"""Represents dependency parse tree information for a token. + (For more information on dependency labels, see + http://www.aclweb.org/anthology/P13-2017 + + Attributes: + head_token_index (int): + Represents the head of this token in the dependency tree. + This is the index of the token which has an arc going to + this token. The index is the position of the token in the + array of tokens returned by the API method. If this token is + a root token, then the ``head_token_index`` is its own + index. + label (~.language_service.DependencyEdge.Label): + The parse label for the token. + """ + + class Label(proto.Enum): + r"""The parse label enum for the token.""" + UNKNOWN = 0 + ABBREV = 1 + ACOMP = 2 + ADVCL = 3 + ADVMOD = 4 + AMOD = 5 + APPOS = 6 + ATTR = 7 + AUX = 8 + AUXPASS = 9 + CC = 10 + CCOMP = 11 + CONJ = 12 + CSUBJ = 13 + CSUBJPASS = 14 + DEP = 15 + DET = 16 + DISCOURSE = 17 + DOBJ = 18 + EXPL = 19 + GOESWITH = 20 + IOBJ = 21 + MARK = 22 + MWE = 23 + MWV = 24 + NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + ASP = 77 + GMOD = 78 + GOBJ = 79 + INFMOD = 80 + MES = 81 + NCOMP = 82 + + head_token_index = proto.Field(proto.INT32, number=1) + + label = proto.Field(proto.ENUM, number=2, enum=Label,) + + +class EntityMention(proto.Message): + r"""Represents a mention for an entity in the text. Currently, + proper noun mentions are supported. 
+
+    Attributes:
+        text (~.language_service.TextSpan):
+            The mention text.
+        type_ (~.language_service.EntityMention.Type):
+            The type of the entity mention.
+        sentiment (~.language_service.Sentiment):
+            For calls to [AnalyzeEntitySentiment][] or if
+            [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment]
+            is set to true, this field will contain the sentiment
+            expressed for this mention of the entity in the provided
+            document.
+    """
+
+    class Type(proto.Enum):
+        r"""The supported types of mentions."""
+        TYPE_UNKNOWN = 0
+        PROPER = 1
+        COMMON = 2
+
+    text = proto.Field(proto.MESSAGE, number=1, message="TextSpan",)
+
+    type_ = proto.Field(proto.ENUM, number=2, enum=Type,)
+
+    sentiment = proto.Field(proto.MESSAGE, number=3, message=Sentiment,)
+
+
+class TextSpan(proto.Message):
+    r"""Represents an output piece of text.
+
+    Attributes:
+        content (str):
+            The content of the output text.
+        begin_offset (int):
+            The API calculates the beginning offset of the content in
+            the original document according to the
+            [EncodingType][google.cloud.language.v1.EncodingType]
+            specified in the API request.
+    """
+
+    content = proto.Field(proto.STRING, number=1)
+
+    begin_offset = proto.Field(proto.INT32, number=2)
+
+
+class ClassificationCategory(proto.Message):
+    r"""Represents a category returned from the text classifier.
+
+    Attributes:
+        name (str):
+            The name of the category representing the document, from the
+            `predefined
+            taxonomy <https://cloud.google.com/natural-language/docs/categories>`__.
+        confidence (float):
+            The classifier's confidence of the category.
+            Number represents how certain the classifier is
+            that this category represents the given text.
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+    confidence = proto.Field(proto.FLOAT, number=2)
+
+
+class AnalyzeSentimentRequest(proto.Message):
+    r"""The sentiment analysis request message.
+
+    Attributes:
+        document (~.language_service.Document):
+            Input document.
+        encoding_type (~.language_service.EncodingType):
+            The encoding type used by the API to
+            calculate sentence offsets.
+    """
+
+    document = proto.Field(proto.MESSAGE, number=1, message=Document,)
+
+    encoding_type = proto.Field(proto.ENUM, number=2, enum="EncodingType",)
+
+
+class AnalyzeSentimentResponse(proto.Message):
+    r"""The sentiment analysis response message.
+
+    Attributes:
+        document_sentiment (~.language_service.Sentiment):
+            The overall sentiment of the input document.
+        language (str):
+            The language of the text, which will be the same as the
+            language specified in the request or, if not specified, the
+            automatically-detected language. See
+            [Document.language][google.cloud.language.v1.Document.language]
+            field for more details.
+        sentences (Sequence[~.language_service.Sentence]):
+            The sentiment for all the sentences in the
+            document.
+    """
+
+    document_sentiment = proto.Field(proto.MESSAGE, number=1, message=Sentiment,)
+
+    language = proto.Field(proto.STRING, number=2)
+
+    sentences = proto.RepeatedField(proto.MESSAGE, number=3, message=Sentence,)
+
+
+class AnalyzeEntitySentimentRequest(proto.Message):
+    r"""The entity-level sentiment analysis request message.
+
+    Attributes:
+        document (~.language_service.Document):
+            Input document.
+        encoding_type (~.language_service.EncodingType):
+            The encoding type used by the API to
+            calculate offsets.
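+
+    Example (an illustrative sketch, not part of the generated API)::
+
+        request = AnalyzeEntitySentimentRequest(
+            document=Document(
+                gcs_content_uri="gs://bucket_name/object_name",
+                type_=Document.Type.PLAIN_TEXT,
+            ),
+            encoding_type=EncodingType.UTF8,
+        )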
+ """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + encoding_type = proto.Field(proto.ENUM, number=2, enum="EncodingType",) + + +class AnalyzeEntitySentimentResponse(proto.Message): + r"""The entity-level sentiment analysis response message. + + Attributes: + entities (Sequence[~.language_service.Entity]): + The recognized entities in the input document + with associated sentiments. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + """ + + entities = proto.RepeatedField(proto.MESSAGE, number=1, message=Entity,) + + language = proto.Field(proto.STRING, number=2) + + +class AnalyzeEntitiesRequest(proto.Message): + r"""The entity analysis request message. + + Attributes: + document (~.language_service.Document): + Input document. + encoding_type (~.language_service.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + encoding_type = proto.Field(proto.ENUM, number=2, enum="EncodingType",) + + +class AnalyzeEntitiesResponse(proto.Message): + r"""The entity analysis response message. + + Attributes: + entities (Sequence[~.language_service.Entity]): + The recognized entities in the input + document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + """ + + entities = proto.RepeatedField(proto.MESSAGE, number=1, message=Entity,) + + language = proto.Field(proto.STRING, number=2) + + +class AnalyzeSyntaxRequest(proto.Message): + r"""The syntax analysis request message. + + Attributes: + document (~.language_service.Document): + Input document. + encoding_type (~.language_service.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + encoding_type = proto.Field(proto.ENUM, number=2, enum="EncodingType",) + + +class AnalyzeSyntaxResponse(proto.Message): + r"""The syntax analysis response message. + + Attributes: + sentences (Sequence[~.language_service.Sentence]): + Sentences in the input document. + tokens (Sequence[~.language_service.Token]): + Tokens, along with their syntactic + information, in the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + """ + + sentences = proto.RepeatedField(proto.MESSAGE, number=1, message=Sentence,) + + tokens = proto.RepeatedField(proto.MESSAGE, number=2, message=Token,) + + language = proto.Field(proto.STRING, number=3) + + +class ClassifyTextRequest(proto.Message): + r"""The document classification request message. + + Attributes: + document (~.language_service.Document): + Input document. + """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + +class ClassifyTextResponse(proto.Message): + r"""The document classification response message. 
+ + Attributes: + categories (Sequence[~.language_service.ClassificationCategory]): + Categories representing the input document. + """ + + categories = proto.RepeatedField( + proto.MESSAGE, number=1, message=ClassificationCategory, + ) + + +class AnnotateTextRequest(proto.Message): + r"""The request message for the text annotation API, which can + perform multiple analysis types (sentiment, entities, and + syntax) in one call. + + Attributes: + document (~.language_service.Document): + Input document. + features (~.language_service.AnnotateTextRequest.Features): + The enabled features. + encoding_type (~.language_service.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + class Features(proto.Message): + r"""All available features for sentiment, syntax, and semantic + analysis. Setting each one to true will enable that specific + analysis for the input. + + Attributes: + extract_syntax (bool): + Extract syntax information. + extract_entities (bool): + Extract entities. + extract_document_sentiment (bool): + Extract document-level sentiment. + extract_entity_sentiment (bool): + Extract entities and their associated + sentiment. + classify_text (bool): + Classify the full document into categories. + """ + + extract_syntax = proto.Field(proto.BOOL, number=1) + + extract_entities = proto.Field(proto.BOOL, number=2) + + extract_document_sentiment = proto.Field(proto.BOOL, number=3) + + extract_entity_sentiment = proto.Field(proto.BOOL, number=4) + + classify_text = proto.Field(proto.BOOL, number=6) + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + features = proto.Field(proto.MESSAGE, number=2, message=Features,) + + encoding_type = proto.Field(proto.ENUM, number=3, enum="EncodingType",) + + +class AnnotateTextResponse(proto.Message): + r"""The text annotations response message. + + Attributes: + sentences (Sequence[~.language_service.Sentence]): + Sentences in the input document. Populated if the user + enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. + tokens (Sequence[~.language_service.Token]): + Tokens, along with their syntactic information, in the input + document. Populated if the user enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. + entities (Sequence[~.language_service.Entity]): + Entities, along with their semantic information, in the + input document. Populated if the user enables + [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities]. + document_sentiment (~.language_service.Sentiment): + The overall sentiment for the document. Populated if the + user enables + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1.Document.language] + field for more details. + categories (Sequence[~.language_service.ClassificationCategory]): + Categories identified in the input document. 
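+
+    Example of a request whose response would populate only
+    ``document_sentiment`` and ``language``, per the field descriptions
+    above (an illustrative sketch)::
+
+        request = AnnotateTextRequest(
+            document=Document(
+                content="Hello, world!",
+                type_=Document.Type.PLAIN_TEXT,
+            ),
+            features=AnnotateTextRequest.Features(
+                extract_document_sentiment=True,
+            ),
+        )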
+ """ + + sentences = proto.RepeatedField(proto.MESSAGE, number=1, message=Sentence,) + + tokens = proto.RepeatedField(proto.MESSAGE, number=2, message=Token,) + + entities = proto.RepeatedField(proto.MESSAGE, number=3, message=Entity,) + + document_sentiment = proto.Field(proto.MESSAGE, number=4, message=Sentiment,) + + language = proto.Field(proto.STRING, number=5) + + categories = proto.RepeatedField( + proto.MESSAGE, number=6, message=ClassificationCategory, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/language_v1beta2/__init__.py b/google/cloud/language_v1beta2/__init__.py index d980c998..ba3826be 100644 --- a/google/cloud/language_v1beta2/__init__.py +++ b/google/cloud/language_v1beta2/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2017, Google LLC All rights reserved. +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,17 +13,57 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -from __future__ import absolute_import - -from google.cloud.language_v1beta2 import types -from google.cloud.language_v1beta2.gapic import enums -from google.cloud.language_v1beta2.gapic import language_service_client - - -class LanguageServiceClient(language_service_client.LanguageServiceClient): - __doc__ = language_service_client.LanguageServiceClient.__doc__ - enums = enums +from .services.language_service import LanguageServiceClient +from .types.language_service import AnalyzeEntitiesRequest +from .types.language_service import AnalyzeEntitiesResponse +from .types.language_service import AnalyzeEntitySentimentRequest +from .types.language_service import AnalyzeEntitySentimentResponse +from .types.language_service import AnalyzeSentimentRequest +from .types.language_service import AnalyzeSentimentResponse +from .types.language_service import AnalyzeSyntaxRequest +from .types.language_service import AnalyzeSyntaxResponse +from .types.language_service import AnnotateTextRequest +from .types.language_service import AnnotateTextResponse +from .types.language_service import ClassificationCategory +from .types.language_service import ClassifyTextRequest +from .types.language_service import ClassifyTextResponse +from .types.language_service import DependencyEdge +from .types.language_service import Document +from .types.language_service import EncodingType +from .types.language_service import Entity +from .types.language_service import EntityMention +from .types.language_service import PartOfSpeech +from .types.language_service import Sentence +from .types.language_service import Sentiment +from .types.language_service import TextSpan +from .types.language_service import Token -__all__ = ("enums", "types", "LanguageServiceClient") +__all__ = ( + "AnalyzeEntitiesRequest", + "AnalyzeEntitiesResponse", + "AnalyzeEntitySentimentRequest", + "AnalyzeEntitySentimentResponse", + "AnalyzeSentimentRequest", + "AnalyzeSentimentResponse", + "AnalyzeSyntaxRequest", + "AnalyzeSyntaxResponse", + "AnnotateTextRequest", + "AnnotateTextResponse", + "ClassificationCategory", + "ClassifyTextRequest", + "ClassifyTextResponse", + "DependencyEdge", + "Document", + "EncodingType", + "Entity", + "EntityMention", + "PartOfSpeech", + "Sentence", + "Sentiment", + "TextSpan", + "Token", + "LanguageServiceClient", +) diff --git 
a/google/cloud/language_v1beta2/py.typed b/google/cloud/language_v1beta2/py.typed new file mode 100644 index 00000000..c0acc99a --- /dev/null +++ b/google/cloud/language_v1beta2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-language package uses inline types. diff --git a/google/__init__.py b/google/cloud/language_v1beta2/services/__init__.py similarity index 73% rename from google/__init__.py rename to google/cloud/language_v1beta2/services/__init__.py index 0e1bc513..42ffdf2b 100644 --- a/google/__init__.py +++ b/google/cloud/language_v1beta2/services/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,12 +13,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) +# diff --git a/google/cloud/language.py b/google/cloud/language_v1beta2/services/language_service/__init__.py similarity index 65% rename from google/cloud/language.py rename to google/cloud/language_v1beta2/services/language_service/__init__.py index 624bd119..d2aff222 100644 --- a/google/cloud/language.py +++ b/google/cloud/language_v1beta2/services/language_service/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2017, Google LLC All rights reserved. +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,11 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -from __future__ import absolute_import - -from google.cloud.language_v1 import LanguageServiceClient -from google.cloud.language_v1 import enums -from google.cloud.language_v1 import types +from .client import LanguageServiceClient +from .async_client import LanguageServiceAsyncClient -__all__ = ("enums", "types", "LanguageServiceClient") +__all__ = ( + "LanguageServiceClient", + "LanguageServiceAsyncClient", +) diff --git a/google/cloud/language_v1beta2/services/language_service/async_client.py b/google/cloud/language_v1beta2/services/language_service/async_client.py new file mode 100644 index 00000000..0c2f1c99 --- /dev/null +++ b/google/cloud/language_v1beta2/services/language_service/async_client.py @@ -0,0 +1,603 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.language_v1beta2.types import language_service + +from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .client import LanguageServiceClient + + +class LanguageServiceAsyncClient: + """Provides text analysis operations such as sentiment analysis + and entity recognition. + """ + + _client: LanguageServiceClient + + DEFAULT_ENDPOINT = LanguageServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LanguageServiceClient.DEFAULT_MTLS_ENDPOINT + + from_service_account_file = LanguageServiceClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(LanguageServiceClient).get_transport_class, type(LanguageServiceClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, LanguageServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the language service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LanguageServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
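+
+        Example (an illustrative sketch; assumes default application
+        credentials are available in the environment)::
+
+            client = LanguageServiceAsyncClient()
+            document = language_service.Document(
+                content="Hello, world!",
+                type_=language_service.Document.Type.PLAIN_TEXT,
+            )
+            response = await client.analyze_sentiment(
+                request={"document": document}
+            )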
+ """ + + self._client = LanguageServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def analyze_sentiment( + self, + request: language_service.AnalyzeSentimentRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + Args: + request (:class:`~.language_service.AnalyzeSentimentRequest`): + The request object. The sentiment analysis request + message. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate sentence offsets for the + sentence sentiment. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnalyzeSentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_sentiment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def analyze_entities( + self, + request: language_service.AnalyzeEntitiesRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Args: + request (:class:`~.language_service.AnalyzeEntitiesRequest`): + The request object. The entity analysis request message. 
+ document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnalyzeEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entities, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def analyze_entity_sentiment( + self, + request: language_service.AnalyzeEntitySentimentRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Args: + request (:class:`~.language_service.AnalyzeEntitySentimentRequest`): + The request object. The entity-level sentiment analysis + request message. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnalyzeEntitySentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_entity_sentiment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def analyze_syntax( + self, + request: language_service.AnalyzeSyntaxRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part-of-speech + tags, dependency trees, and other properties. + + Args: + request (:class:`~.language_service.AnalyzeSyntaxRequest`): + The request object. The syntax analysis request message. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnalyzeSyntaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
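+        # Each flattened keyword argument, when provided, overwrites the
+        # corresponding field on the coerced request object above.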
+ + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_syntax, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def classify_text( + self, + request: language_service.ClassifyTextRequest = None, + *, + document: language_service.Document = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + Args: + request (:class:`~.language_service.ClassifyTextRequest`): + The request object. The document classification request + message. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.ClassifyTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.classify_text, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def annotate_text( + self, + request: language_service.AnnotateTextRequest = None, + *, + document: language_service.Document = None, + features: language_service.AnnotateTextRequest.Features = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. 
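+
+        Only the analyses enabled in ``features`` are performed, and only
+        their corresponding response fields are populated.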
+ + Args: + request (:class:`~.language_service.AnnotateTextRequest`): + The request object. The request message for the text + annotation API, which can perform multiple analysis + types (sentiment, entities, and syntax) in one call. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`~.language_service.AnnotateTextRequest.Features`): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([document, features, encoding_type]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = language_service.AnnotateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_text, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-language",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("LanguageServiceAsyncClient",) diff --git a/google/cloud/language_v1beta2/services/language_service/client.py b/google/cloud/language_v1beta2/services/language_service/client.py new file mode 100644 index 00000000..c2d85031 --- /dev/null +++ b/google/cloud/language_v1beta2/services/language_service/client.py @@ -0,0 +1,715 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.language_v1beta2.types import language_service
+
+from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import LanguageServiceGrpcTransport
+from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport
+
+
+class LanguageServiceClientMeta(type):
+    """Metaclass for the LanguageService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[LanguageServiceTransport]]
+    _transport_registry["grpc"] = LanguageServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls, label: str = None,) -> Type[LanguageServiceTransport]:
+        """Return an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LanguageServiceClient(metaclass=LanguageServiceClientMeta):
+    """Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "language.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            LanguageServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, LanguageServiceTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the language service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.LanguageServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (client_options_lib.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = client_options_lib.from_dict(client_options)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+
+        # Create SSL credentials for mutual TLS if needed.
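+        # Two environment variables drive the logic below:
+        # GOOGLE_API_USE_CLIENT_CERTIFICATE ("true"/"false") controls
+        # whether a client certificate is offered for mutual TLS, and
+        # GOOGLE_API_USE_MTLS_ENDPOINT ("always"/"never"/"auto") controls
+        # which endpoint is targeted; "auto" picks the mTLS endpoint only
+        # when a client certificate is actually available.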
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LanguageServiceTransport): + # transport is a LanguageServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def analyze_sentiment( + self, + request: language_service.AnalyzeSentimentRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Analyzes the sentiment of the provided text. + + Args: + request (:class:`~.language_service.AnalyzeSentimentRequest`): + The request object. The sentiment analysis request + message. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate sentence offsets for the + sentence sentiment. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. 
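+
+        Example (an illustrative sketch)::
+
+            document = language_service.Document(
+                content="I love this product.",
+                type_=language_service.Document.Type.PLAIN_TEXT,
+            )
+            response = client.analyze_sentiment(
+                document=document,
+                encoding_type=language_service.EncodingType.UTF8,
+            )
+            print(response.document_sentiment.score)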
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSentimentRequest): + request = language_service.AnalyzeSentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_sentiment] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def analyze_entities( + self, + request: language_service.AnalyzeEntitiesRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Args: + request (:class:`~.language_service.AnalyzeEntitiesRequest`): + The request object. The entity analysis request message. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
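+        # A plain dict may also be passed as ``request``; the constructor
+        # below coerces it into an AnalyzeEntitiesRequest.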
+ if not isinstance(request, language_service.AnalyzeEntitiesRequest): + request = language_service.AnalyzeEntitiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entities] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def analyze_entity_sentiment( + self, + request: language_service.AnalyzeEntitySentimentRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Args: + request (:class:`~.language_service.AnalyzeEntitySentimentRequest`): + The request object. The entity-level sentiment analysis + request message. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeEntitySentimentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeEntitySentimentRequest): + request = language_service.AnalyzeEntitySentimentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_entity_sentiment] + + # Send the request. 
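The `_wrapped_methods` lookup above is where the per-method retry and timeout defaults (declared later in `transports/base.py`) are attached; both can still be overridden per call. A minimal sketch, assuming the v1beta2 package re-exports the client and types at the top level and that Application Default Credentials are configured:

```py
from google.api_core import retry as retries
from google.cloud import language_v1beta2

client = language_v1beta2.LanguageServiceClient()
document = language_v1beta2.Document(
    content="The weather is lovely today.",
    type_=language_v1beta2.Document.Type.PLAIN_TEXT,
)

# Replace the default retry/timeout attached by wrap_method, for this call only.
response = client.analyze_entities(
    document=document,
    retry=retries.Retry(initial=0.2, maximum=30.0, multiplier=2.0),
    timeout=30.0,
)
for entity in response.entities:
    print(entity.name, entity.salience)
```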
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def analyze_syntax( + self, + request: language_service.AnalyzeSyntaxRequest = None, + *, + document: language_service.Document = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part-of-speech + tags, dependency trees, and other properties. + + Args: + request (:class:`~.language_service.AnalyzeSyntaxRequest`): + The request object. The syntax analysis request message. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnalyzeSyntaxRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnalyzeSyntaxRequest): + request = language_service.AnalyzeSyntaxRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_syntax] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def classify_text( + self, + request: language_service.ClassifyTextRequest = None, + *, + document: language_service.Document = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Classifies a document into categories. + + Args: + request (:class:`~.language_service.ClassifyTextRequest`): + The request object. The document classification request + message. + document (:class:`~.language_service.Document`): + Required. Input document. 
+ This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.ClassifyTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.ClassifyTextRequest): + request = language_service.ClassifyTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.classify_text] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def annotate_text( + self, + request: language_service.AnnotateTextRequest = None, + *, + document: language_service.Document = None, + features: language_service.AnnotateTextRequest.Features = None, + encoding_type: language_service.EncodingType = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + Args: + request (:class:`~.language_service.AnnotateTextRequest`): + The request object. The request message for the text + annotation API, which can perform multiple analysis + types (sentiment, entities, and syntax) in one call. + document (:class:`~.language_service.Document`): + Required. Input document. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`~.language_service.AnnotateTextRequest.Features`): + Required. The enabled features. + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + encoding_type (:class:`~.language_service.EncodingType`): + The encoding type used by the API to + calculate offsets. + This corresponds to the ``encoding_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, features, encoding_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a language_service.AnnotateTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, language_service.AnnotateTextRequest): + request = language_service.AnnotateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if document is not None: + request.document = document + if features is not None: + request.features = features + if encoding_type is not None: + request.encoding_type = encoding_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_text] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-language",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("LanguageServiceClient",) diff --git a/google/cloud/language_v1beta2/services/language_service/transports/__init__.py b/google/cloud/language_v1beta2/services/language_service/transports/__init__.py new file mode 100644 index 00000000..22069335 --- /dev/null +++ b/google/cloud/language_v1beta2/services/language_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import LanguageServiceTransport +from .grpc import LanguageServiceGrpcTransport +from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
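For orientation: the registry compiled below is what backs the client's `get_transport_class` helper used earlier in this diff to resolve a transport by name. A short illustrative sketch (module paths follow the generated layout):

```py
from google.cloud.language_v1beta2.services.language_service import (
    LanguageServiceClient,
)

# Look up a transport class by its registry key; with no label,
# the helper falls back to the first (default) transport, i.e. gRPC.
GrpcTransport = LanguageServiceClient.get_transport_class("grpc")
AsyncIOTransport = LanguageServiceClient.get_transport_class("grpc_asyncio")
print(GrpcTransport.__name__, AsyncIOTransport.__name__)
```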
+_transport_registry = OrderedDict()  # type: Dict[str, Type[LanguageServiceTransport]]
+_transport_registry["grpc"] = LanguageServiceGrpcTransport
+_transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport
+
+
+__all__ = (
+    "LanguageServiceTransport",
+    "LanguageServiceGrpcTransport",
+    "LanguageServiceGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/language_v1beta2/services/language_service/transports/base.py b/google/cloud/language_v1beta2/services/language_service/transports/base.py
new file mode 100644
index 00000000..aa6eb5d0
--- /dev/null
+++ b/google/cloud/language_v1beta2/services/language_service/transports/base.py
@@ -0,0 +1,263 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.language_v1beta2.types import language_service
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-language",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class LanguageServiceTransport(abc.ABC):
+    """Abstract transport class for LanguageService."""
+
+    AUTH_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-language",
+        "https://www.googleapis.com/auth/cloud-platform",
+    )
+
+    def __init__(
+        self,
+        *,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = auth.load_credentials_from_file(
+                credentials_file, scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = auth.default(
+                scopes=scopes, quota_project_id=quota_project_id
+            )
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Lifted into its own function so it can be stubbed out during tests.
+        self._prep_wrapped_messages(client_info)
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.analyze_sentiment: gapic_v1.method.wrap_method(
+                self.analyze_sentiment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+                    ),
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_entities: gapic_v1.method.wrap_method(
+                self.analyze_entities,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+                    ),
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_entity_sentiment: gapic_v1.method.wrap_method(
+                self.analyze_entity_sentiment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+                    ),
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.analyze_syntax: gapic_v1.method.wrap_method(
+                self.analyze_syntax,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+                    ),
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.classify_text: gapic_v1.method.wrap_method(
+                self.classify_text,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+                    ),
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+            self.annotate_text: gapic_v1.method.wrap_method(
+                self.annotate_text,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        exceptions.ServiceUnavailable, exceptions.DeadlineExceeded,
+                    ),
+                ),
+                default_timeout=600.0,
+                client_info=client_info,
+            ),
+        }
+
+    @property
+    def analyze_sentiment(
+        self,
+    ) -> typing.Callable[
+        [language_service.AnalyzeSentimentRequest],
+        typing.Union[
+            language_service.AnalyzeSentimentResponse,
+            typing.Awaitable[language_service.AnalyzeSentimentResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def analyze_entities(
+        self,
+    ) -> typing.Callable[
+        [language_service.AnalyzeEntitiesRequest],
+        typing.Union[
+            language_service.AnalyzeEntitiesResponse,
+            typing.Awaitable[language_service.AnalyzeEntitiesResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def analyze_entity_sentiment(
+        self,
+    ) -> typing.Callable[
+
[language_service.AnalyzeEntitySentimentRequest], + typing.Union[ + language_service.AnalyzeEntitySentimentResponse, + typing.Awaitable[language_service.AnalyzeEntitySentimentResponse], + ], + ]: + raise NotImplementedError() + + @property + def analyze_syntax( + self, + ) -> typing.Callable[ + [language_service.AnalyzeSyntaxRequest], + typing.Union[ + language_service.AnalyzeSyntaxResponse, + typing.Awaitable[language_service.AnalyzeSyntaxResponse], + ], + ]: + raise NotImplementedError() + + @property + def classify_text( + self, + ) -> typing.Callable[ + [language_service.ClassifyTextRequest], + typing.Union[ + language_service.ClassifyTextResponse, + typing.Awaitable[language_service.ClassifyTextResponse], + ], + ]: + raise NotImplementedError() + + @property + def annotate_text( + self, + ) -> typing.Callable[ + [language_service.AnnotateTextRequest], + typing.Union[ + language_service.AnnotateTextResponse, + typing.Awaitable[language_service.AnnotateTextResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("LanguageServiceTransport",) diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py new file mode 100644 index 00000000..dd734bc0 --- /dev/null +++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.language_v1beta2.types import language_service + +from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO + + +class LanguageServiceGrpcTransport(LanguageServiceTransport): + """gRPC backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
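The `api_mtls_endpoint`/`client_cert_source` arguments handled above are deprecated; the supported route to mutual TLS is the client-options flow from the client module earlier in this diff (channel construction continues just below). A hedged sketch, with `client.pem`/`client.key` as placeholder paths:

```py
import os
from google.api_core.client_options import ClientOptions
from google.cloud import language_v1beta2

# Opt in to client certificates; the client also honors
# GOOGLE_API_USE_MTLS_ENDPOINT (never/auto/always) for endpoint selection.
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"

def client_cert_source():
    # Return (certificate_chain, private_key) as PEM bytes.
    with open("client.pem", "rb") as fp:
        cert = fp.read()
    with open("client.key", "rb") as fp:
        key = fp.read()
    return cert, key

options = ClientOptions(client_cert_source=client_cert_source)
client = language_v1beta2.LanguageServiceClient(client_options=options)
```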
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        self._stubs = {}  # type: Dict[str, Callable]
+
+        # Run the base constructor.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def analyze_sentiment(
+        self,
+    ) -> Callable[
+        [language_service.AnalyzeSentimentRequest],
+        language_service.AnalyzeSentimentResponse,
+    ]:
+        r"""Return a callable for the analyze sentiment method over gRPC.
+
+        Analyzes the sentiment of the provided text.
+
+        Returns:
+            Callable[[~.AnalyzeSentimentRequest],
+                    ~.AnalyzeSentimentResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "analyze_sentiment" not in self._stubs: + self._stubs["analyze_sentiment"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment", + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs["analyze_sentiment"] + + @property + def analyze_entities( + self, + ) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse, + ]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + ~.AnalyzeEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_entities" not in self._stubs: + self._stubs["analyze_entities"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities", + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs["analyze_entities"] + + @property + def analyze_entity_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse, + ]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. + + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + ~.AnalyzeEntitySentimentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_entity_sentiment" not in self._stubs: + self._stubs["analyze_entity_sentiment"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment", + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs["analyze_entity_sentiment"] + + @property + def analyze_syntax( + self, + ) -> Callable[ + [language_service.AnalyzeSyntaxRequest], language_service.AnalyzeSyntaxResponse + ]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part-of-speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + ~.AnalyzeSyntaxResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
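Each property here builds its gRPC stub once and caches it in `_stubs`; the returned callable accepts the proto-plus request directly because the request's `serialize`/`deserialize` hooks are registered on the channel. An illustrative sketch (assumes Application Default Credentials; normally the client drives these through `_wrapped_methods`):

```py
from google.cloud.language_v1beta2.services.language_service.transports import (
    LanguageServiceGrpcTransport,
)
from google.cloud.language_v1beta2.types import language_service

transport = LanguageServiceGrpcTransport()  # channel built from default credentials
request = language_service.AnalyzeSyntaxRequest(
    document=language_service.Document(
        content="Time flies like an arrow.",
        type_=language_service.Document.Type.PLAIN_TEXT,
    )
)
# The property returns the cached unary-unary callable for this RPC.
response = transport.analyze_syntax(request)
print([token.lemma for token in response.tokens])
```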
+ if "analyze_syntax" not in self._stubs: + self._stubs["analyze_syntax"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax", + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs["analyze_syntax"] + + @property + def classify_text( + self, + ) -> Callable[ + [language_service.ClassifyTextRequest], language_service.ClassifyTextResponse + ]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + ~.ClassifyTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "classify_text" not in self._stubs: + self._stubs["classify_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/ClassifyText", + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs["classify_text"] + + @property + def annotate_text( + self, + ) -> Callable[ + [language_service.AnnotateTextRequest], language_service.AnnotateTextResponse + ]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + Returns: + Callable[[~.AnnotateTextRequest], + ~.AnnotateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_text" not in self._stubs: + self._stubs["annotate_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnnotateText", + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs["annotate_text"] + + +__all__ = ("LanguageServiceGrpcTransport",) diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..7898ec3f --- /dev/null +++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py @@ -0,0 +1,418 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
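Before the AsyncIO variant begins below, note how the sync transport composes with the client: a caller can build a channel via `create_channel` and hand the transport instance to `LanguageServiceClient`. A sketch under the same default-credentials assumption:

```py
from google.cloud import language_v1beta2
from google.cloud.language_v1beta2.services.language_service.transports import (
    LanguageServiceGrpcTransport,
)

channel = LanguageServiceGrpcTransport.create_channel("language.googleapis.com:443")
transport = LanguageServiceGrpcTransport(channel=channel)

# When a transport instance is supplied, credentials/scopes must not also be
# passed to the client (its constructor raises ValueError, as shown earlier).
client = language_v1beta2.LanguageServiceClient(transport=transport)
```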
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google import auth  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.language_v1beta2.types import language_service
+
+from .base import LanguageServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import LanguageServiceGrpcTransport
+
+
+class LanguageServiceGrpcAsyncIOTransport(LanguageServiceTransport):
+    """gRPC AsyncIO backend transport for LanguageService.
+
+    Provides text analysis operations such as sentiment analysis
+    and entity recognition.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "language.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def analyze_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeSentimentRequest], + Awaitable[language_service.AnalyzeSentimentResponse], + ]: + r"""Return a callable for the analyze sentiment method over gRPC. + + Analyzes the sentiment of the provided text. + + Returns: + Callable[[~.AnalyzeSentimentRequest], + Awaitable[~.AnalyzeSentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_sentiment" not in self._stubs: + self._stubs["analyze_sentiment"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment", + request_serializer=language_service.AnalyzeSentimentRequest.serialize, + response_deserializer=language_service.AnalyzeSentimentResponse.deserialize, + ) + return self._stubs["analyze_sentiment"] + + @property + def analyze_entities( + self, + ) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + Awaitable[language_service.AnalyzeEntitiesResponse], + ]: + r"""Return a callable for the analyze entities method over gRPC. + + Finds named entities (currently proper names and + common nouns) in the text along with entity types, + salience, mentions for each entity, and other + properties. + + Returns: + Callable[[~.AnalyzeEntitiesRequest], + Awaitable[~.AnalyzeEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_entities" not in self._stubs: + self._stubs["analyze_entities"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities", + request_serializer=language_service.AnalyzeEntitiesRequest.serialize, + response_deserializer=language_service.AnalyzeEntitiesResponse.deserialize, + ) + return self._stubs["analyze_entities"] + + @property + def analyze_entity_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + Awaitable[language_service.AnalyzeEntitySentimentResponse], + ]: + r"""Return a callable for the analyze entity sentiment method over gRPC. + + Finds entities, similar to + [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + in the text and analyzes sentiment associated with each entity + and its mentions. 
+ + Returns: + Callable[[~.AnalyzeEntitySentimentRequest], + Awaitable[~.AnalyzeEntitySentimentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_entity_sentiment" not in self._stubs: + self._stubs["analyze_entity_sentiment"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment", + request_serializer=language_service.AnalyzeEntitySentimentRequest.serialize, + response_deserializer=language_service.AnalyzeEntitySentimentResponse.deserialize, + ) + return self._stubs["analyze_entity_sentiment"] + + @property + def analyze_syntax( + self, + ) -> Callable[ + [language_service.AnalyzeSyntaxRequest], + Awaitable[language_service.AnalyzeSyntaxResponse], + ]: + r"""Return a callable for the analyze syntax method over gRPC. + + Analyzes the syntax of the text and provides sentence + boundaries and tokenization along with part-of-speech + tags, dependency trees, and other properties. + + Returns: + Callable[[~.AnalyzeSyntaxRequest], + Awaitable[~.AnalyzeSyntaxResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "analyze_syntax" not in self._stubs: + self._stubs["analyze_syntax"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax", + request_serializer=language_service.AnalyzeSyntaxRequest.serialize, + response_deserializer=language_service.AnalyzeSyntaxResponse.deserialize, + ) + return self._stubs["analyze_syntax"] + + @property + def classify_text( + self, + ) -> Callable[ + [language_service.ClassifyTextRequest], + Awaitable[language_service.ClassifyTextResponse], + ]: + r"""Return a callable for the classify text method over gRPC. + + Classifies a document into categories. + + Returns: + Callable[[~.ClassifyTextRequest], + Awaitable[~.ClassifyTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "classify_text" not in self._stubs: + self._stubs["classify_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/ClassifyText", + request_serializer=language_service.ClassifyTextRequest.serialize, + response_deserializer=language_service.ClassifyTextResponse.deserialize, + ) + return self._stubs["classify_text"] + + @property + def annotate_text( + self, + ) -> Callable[ + [language_service.AnnotateTextRequest], + Awaitable[language_service.AnnotateTextResponse], + ]: + r"""Return a callable for the annotate text method over gRPC. + + A convenience method that provides all syntax, + sentiment, entity, and classification features in one + call. + + Returns: + Callable[[~.AnnotateTextRequest], + Awaitable[~.AnnotateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
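Because the stubs in this transport are awaitable, they are normally driven through the async client (a `LanguageServiceAsyncClient` is assumed to be generated alongside the sync client; it does not appear in this diff). A hedged sketch:

```py
import asyncio

from google.cloud import language_v1beta2


async def main():
    client = language_v1beta2.LanguageServiceAsyncClient()
    document = language_v1beta2.Document(
        content="The train was on time.",
        type_=language_v1beta2.Document.Type.PLAIN_TEXT,
    )
    # Each call awaits the unary-unary callable provided by this transport.
    response = await client.analyze_sentiment(request={"document": document})
    print(response.document_sentiment.score)


asyncio.run(main())
```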
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_text" not in self._stubs: + self._stubs["annotate_text"] = self.grpc_channel.unary_unary( + "/google.cloud.language.v1beta2.LanguageService/AnnotateText", + request_serializer=language_service.AnnotateTextRequest.serialize, + response_deserializer=language_service.AnnotateTextResponse.deserialize, + ) + return self._stubs["annotate_text"] + + +__all__ = ("LanguageServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/language_v1beta2/types/__init__.py b/google/cloud/language_v1beta2/types/__init__.py new file mode 100644 index 00000000..f44df83e --- /dev/null +++ b/google/cloud/language_v1beta2/types/__init__.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .language_service import ( + Document, + Sentence, + Entity, + Token, + Sentiment, + PartOfSpeech, + DependencyEdge, + EntityMention, + TextSpan, + ClassificationCategory, + AnalyzeSentimentRequest, + AnalyzeSentimentResponse, + AnalyzeEntitySentimentRequest, + AnalyzeEntitySentimentResponse, + AnalyzeEntitiesRequest, + AnalyzeEntitiesResponse, + AnalyzeSyntaxRequest, + AnalyzeSyntaxResponse, + ClassifyTextRequest, + ClassifyTextResponse, + AnnotateTextRequest, + AnnotateTextResponse, +) + + +__all__ = ( + "Document", + "Sentence", + "Entity", + "Token", + "Sentiment", + "PartOfSpeech", + "DependencyEdge", + "EntityMention", + "TextSpan", + "ClassificationCategory", + "AnalyzeSentimentRequest", + "AnalyzeSentimentResponse", + "AnalyzeEntitySentimentRequest", + "AnalyzeEntitySentimentResponse", + "AnalyzeEntitiesRequest", + "AnalyzeEntitiesResponse", + "AnalyzeSyntaxRequest", + "AnalyzeSyntaxResponse", + "ClassifyTextRequest", + "ClassifyTextResponse", + "AnnotateTextRequest", + "AnnotateTextResponse", +) diff --git a/google/cloud/language_v1beta2/types/language_service.py b/google/cloud/language_v1beta2/types/language_service.py new file mode 100644 index 00000000..411dd8ee --- /dev/null +++ b/google/cloud/language_v1beta2/types/language_service.py @@ -0,0 +1,880 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
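The `types/__init__.py` re-exports above mean the flat names and the `language_service` module (whose definition starts below) resolve to the same proto-plus classes; a quick illustrative check:

```py
from google.cloud.language_v1beta2 import types
from google.cloud.language_v1beta2.types import language_service

# Both import styles point at the same class objects.
assert types.Document is language_service.Document
assert types.AnnotateTextRequest is language_service.AnnotateTextRequest
```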
+# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.language.v1beta2", + manifest={ + "EncodingType", + "Document", + "Sentence", + "Entity", + "Token", + "Sentiment", + "PartOfSpeech", + "DependencyEdge", + "EntityMention", + "TextSpan", + "ClassificationCategory", + "AnalyzeSentimentRequest", + "AnalyzeSentimentResponse", + "AnalyzeEntitySentimentRequest", + "AnalyzeEntitySentimentResponse", + "AnalyzeEntitiesRequest", + "AnalyzeEntitiesResponse", + "AnalyzeSyntaxRequest", + "AnalyzeSyntaxResponse", + "ClassifyTextRequest", + "ClassifyTextResponse", + "AnnotateTextRequest", + "AnnotateTextResponse", + }, +) + + +class EncodingType(proto.Enum): + r"""Represents the text encoding that the caller uses to process the + output. Providing an ``EncodingType`` is recommended because the API + provides the beginning offsets for various outputs, such as tokens + and mentions, and languages that natively use different text + encodings may access offsets differently. + """ + NONE = 0 + UTF8 = 1 + UTF16 = 2 + UTF32 = 3 + + +class Document(proto.Message): + r"""Represents the input to API methods. + + Attributes: + type_ (~.language_service.Document.Type): + Required. If the type is not set or is ``TYPE_UNSPECIFIED``, + returns an ``INVALID_ARGUMENT`` error. + content (str): + The content of the input in string format. + Cloud audit logging exempt since it is based on + user data. + gcs_content_uri (str): + The Google Cloud Storage URI where the file content is + located. This URI must be of the form: + gs://bucket_name/object_name. For more details, see + https://cloud.google.com/storage/docs/reference-uris. NOTE: + Cloud Storage object versioning is not supported. + language (str): + The language of the document (if not specified, the language + is automatically detected). Both ISO and BCP-47 language + codes are accepted. `Language + Support `__ + lists currently supported languages for each API method. If + the language (either specified by the caller or + automatically detected) is not supported by the called API + method, an ``INVALID_ARGUMENT`` error is returned. + """ + + class Type(proto.Enum): + r"""The document types enum.""" + TYPE_UNSPECIFIED = 0 + PLAIN_TEXT = 1 + HTML = 2 + + type_ = proto.Field(proto.ENUM, number=1, enum=Type,) + + content = proto.Field(proto.STRING, number=2, oneof="source") + + gcs_content_uri = proto.Field(proto.STRING, number=3, oneof="source") + + language = proto.Field(proto.STRING, number=4) + + +class Sentence(proto.Message): + r"""Represents a sentence in the input document. + + Attributes: + text (~.language_service.TextSpan): + The sentence text. + sentiment (~.language_service.Sentiment): + For calls to [AnalyzeSentiment][] or if + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] + is set to true, this field will contain the sentiment for + the sentence. + """ + + text = proto.Field(proto.MESSAGE, number=1, message="TextSpan",) + + sentiment = proto.Field(proto.MESSAGE, number=2, message="Sentiment",) + + +class Entity(proto.Message): + r"""Represents a phrase in the text that is a known entity, such + as a person, an organization, or location. The API associates + information, such as salience and mentions, with entities. + + Attributes: + name (str): + The representative name for the entity. + type_ (~.language_service.Entity.Type): + The entity type. 
+ metadata (Sequence[~.language_service.Entity.MetadataEntry]): + Metadata associated with the entity. + + For most entity types, the metadata is a Wikipedia URL + (``wikipedia_url``) and Knowledge Graph MID (``mid``), if + they are available. For the metadata associated with other + entity types, see the Type table below. + salience (float): + The salience score associated with the entity in the [0, + 1.0] range. + + The salience score for an entity provides information about + the importance or centrality of that entity to the entire + document text. Scores closer to 0 are less salient, while + scores closer to 1.0 are highly salient. + mentions (Sequence[~.language_service.EntityMention]): + The mentions of this entity in the input + document. The API currently supports proper noun + mentions. + sentiment (~.language_service.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the aggregate + sentiment expressed for this entity in the provided + document. + """ + + class Type(proto.Enum): + r"""The type of the entity. For most entity types, the associated + metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph + MID (``mid``). The table below lists the associated fields for + entities that have different metadata. + """ + UNKNOWN = 0 + PERSON = 1 + LOCATION = 2 + ORGANIZATION = 3 + EVENT = 4 + WORK_OF_ART = 5 + CONSUMER_GOOD = 6 + OTHER = 7 + PHONE_NUMBER = 9 + ADDRESS = 10 + DATE = 11 + NUMBER = 12 + PRICE = 13 + + name = proto.Field(proto.STRING, number=1) + + type_ = proto.Field(proto.ENUM, number=2, enum=Type,) + + metadata = proto.MapField(proto.STRING, proto.STRING, number=3) + + salience = proto.Field(proto.FLOAT, number=4) + + mentions = proto.RepeatedField(proto.MESSAGE, number=5, message="EntityMention",) + + sentiment = proto.Field(proto.MESSAGE, number=6, message="Sentiment",) + + +class Token(proto.Message): + r"""Represents the smallest syntactic building block of the text. + + Attributes: + text (~.language_service.TextSpan): + The token text. + part_of_speech (~.language_service.PartOfSpeech): + Parts of speech tag for this token. + dependency_edge (~.language_service.DependencyEdge): + Dependency tree parse for this token. + lemma (str): + `Lemma `__ + of the token. + """ + + text = proto.Field(proto.MESSAGE, number=1, message="TextSpan",) + + part_of_speech = proto.Field(proto.MESSAGE, number=2, message="PartOfSpeech",) + + dependency_edge = proto.Field(proto.MESSAGE, number=3, message="DependencyEdge",) + + lemma = proto.Field(proto.STRING, number=4) + + +class Sentiment(proto.Message): + r"""Represents the feeling associated with the entire text or + entities in the text. + Next ID: 6 + + Attributes: + magnitude (float): + A non-negative number in the [0, +inf) range, which + represents the absolute magnitude of sentiment regardless of + score (positive or negative). + score (float): + Sentiment score between -1.0 (negative + sentiment) and 1.0 (positive sentiment). + """ + + magnitude = proto.Field(proto.FLOAT, number=2) + + score = proto.Field(proto.FLOAT, number=3) + + +class PartOfSpeech(proto.Message): + r"""Represents part of speech information for a token. + + Attributes: + tag (~.language_service.PartOfSpeech.Tag): + The part of speech tag. + aspect (~.language_service.PartOfSpeech.Aspect): + The grammatical aspect. 
+ case (~.language_service.PartOfSpeech.Case): + The grammatical case. + form (~.language_service.PartOfSpeech.Form): + The grammatical form. + gender (~.language_service.PartOfSpeech.Gender): + The grammatical gender. + mood (~.language_service.PartOfSpeech.Mood): + The grammatical mood. + number (~.language_service.PartOfSpeech.Number): + The grammatical number. + person (~.language_service.PartOfSpeech.Person): + The grammatical person. + proper (~.language_service.PartOfSpeech.Proper): + The grammatical properness. + reciprocity (~.language_service.PartOfSpeech.Reciprocity): + The grammatical reciprocity. + tense (~.language_service.PartOfSpeech.Tense): + The grammatical tense. + voice (~.language_service.PartOfSpeech.Voice): + The grammatical voice. + """ + + class Tag(proto.Enum): + r"""The part of speech tags enum.""" + UNKNOWN = 0 + ADJ = 1 + ADP = 2 + ADV = 3 + CONJ = 4 + DET = 5 + NOUN = 6 + NUM = 7 + PRON = 8 + PRT = 9 + PUNCT = 10 + VERB = 11 + X = 12 + AFFIX = 13 + + class Aspect(proto.Enum): + r"""The characteristic of a verb that expresses time flow during + an event. + """ + ASPECT_UNKNOWN = 0 + PERFECTIVE = 1 + IMPERFECTIVE = 2 + PROGRESSIVE = 3 + + class Case(proto.Enum): + r"""The grammatical function performed by a noun or pronoun in a + phrase, clause, or sentence. In some languages, other parts of + speech, such as adjective and determiner, take case inflection + in agreement with the noun. + """ + CASE_UNKNOWN = 0 + ACCUSATIVE = 1 + ADVERBIAL = 2 + COMPLEMENTIVE = 3 + DATIVE = 4 + GENITIVE = 5 + INSTRUMENTAL = 6 + LOCATIVE = 7 + NOMINATIVE = 8 + OBLIQUE = 9 + PARTITIVE = 10 + PREPOSITIONAL = 11 + REFLEXIVE_CASE = 12 + RELATIVE_CASE = 13 + VOCATIVE = 14 + + class Form(proto.Enum): + r"""Depending on the language, Form can be categorizing different + forms of verbs, adjectives, adverbs, etc. For example, + categorizing inflected endings of verbs and adjectives or + distinguishing between short and long forms of adjectives and + participles + """ + FORM_UNKNOWN = 0 + ADNOMIAL = 1 + AUXILIARY = 2 + COMPLEMENTIZER = 3 + FINAL_ENDING = 4 + GERUND = 5 + REALIS = 6 + IRREALIS = 7 + SHORT = 8 + LONG = 9 + ORDER = 10 + SPECIFIC = 11 + + class Gender(proto.Enum): + r"""Gender classes of nouns reflected in the behaviour of + associated words. + """ + GENDER_UNKNOWN = 0 + FEMININE = 1 + MASCULINE = 2 + NEUTER = 3 + + class Mood(proto.Enum): + r"""The grammatical feature of verbs, used for showing modality + and attitude. + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(proto.Enum): + r"""Count distinctions.""" + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(proto.Enum): + r"""The distinction between the speaker, second person, third + person, etc. + """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(proto.Enum): + r"""This category shows if the token is part of a proper name.""" + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(proto.Enum): + r"""Reciprocal features of a pronoun.""" + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(proto.Enum): + r"""Time reference.""" + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(proto.Enum): + r"""The relationship between the action that a verb expresses and + the participants identified by its arguments. 
+ """ + VOICE_UNKNOWN = 0 + ACTIVE = 1 + CAUSATIVE = 2 + PASSIVE = 3 + + tag = proto.Field(proto.ENUM, number=1, enum=Tag,) + + aspect = proto.Field(proto.ENUM, number=2, enum=Aspect,) + + case = proto.Field(proto.ENUM, number=3, enum=Case,) + + form = proto.Field(proto.ENUM, number=4, enum=Form,) + + gender = proto.Field(proto.ENUM, number=5, enum=Gender,) + + mood = proto.Field(proto.ENUM, number=6, enum=Mood,) + + number = proto.Field(proto.ENUM, number=7, enum=Number,) + + person = proto.Field(proto.ENUM, number=8, enum=Person,) + + proper = proto.Field(proto.ENUM, number=9, enum=Proper,) + + reciprocity = proto.Field(proto.ENUM, number=10, enum=Reciprocity,) + + tense = proto.Field(proto.ENUM, number=11, enum=Tense,) + + voice = proto.Field(proto.ENUM, number=12, enum=Voice,) + + +class DependencyEdge(proto.Message): + r"""Represents dependency parse tree information for a token. + + Attributes: + head_token_index (int): + Represents the head of this token in the dependency tree. + This is the index of the token which has an arc going to + this token. The index is the position of the token in the + array of tokens returned by the API method. If this token is + a root token, then the ``head_token_index`` is its own + index. + label (~.language_service.DependencyEdge.Label): + The parse label for the token. + """ + + class Label(proto.Enum): + r"""The parse label enum for the token.""" + UNKNOWN = 0 + ABBREV = 1 + ACOMP = 2 + ADVCL = 3 + ADVMOD = 4 + AMOD = 5 + APPOS = 6 + ATTR = 7 + AUX = 8 + AUXPASS = 9 + CC = 10 + CCOMP = 11 + CONJ = 12 + CSUBJ = 13 + CSUBJPASS = 14 + DEP = 15 + DET = 16 + DISCOURSE = 17 + DOBJ = 18 + EXPL = 19 + GOESWITH = 20 + IOBJ = 21 + MARK = 22 + MWE = 23 + MWV = 24 + NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + ASP = 77 + GMOD = 78 + GOBJ = 79 + INFMOD = 80 + MES = 81 + NCOMP = 82 + + head_token_index = proto.Field(proto.INT32, number=1) + + label = proto.Field(proto.ENUM, number=2, enum=Label,) + + +class EntityMention(proto.Message): + r"""Represents a mention for an entity in the text. Currently, + proper noun mentions are supported. + + Attributes: + text (~.language_service.TextSpan): + The mention text. + type_ (~.language_service.EntityMention.Type): + The type of the entity mention. + sentiment (~.language_service.Sentiment): + For calls to [AnalyzeEntitySentiment][] or if + [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] + is set to true, this field will contain the sentiment + expressed for this mention of the entity in the provided + document. 
+ """ + + class Type(proto.Enum): + r"""The supported types of mentions.""" + TYPE_UNKNOWN = 0 + PROPER = 1 + COMMON = 2 + + text = proto.Field(proto.MESSAGE, number=1, message="TextSpan",) + + type_ = proto.Field(proto.ENUM, number=2, enum=Type,) + + sentiment = proto.Field(proto.MESSAGE, number=3, message=Sentiment,) + + +class TextSpan(proto.Message): + r"""Represents an output piece of text. + + Attributes: + content (str): + The content of the output text. + begin_offset (int): + The API calculates the beginning offset of the content in + the original document according to the + [EncodingType][google.cloud.language.v1beta2.EncodingType] + specified in the API request. + """ + + content = proto.Field(proto.STRING, number=1) + + begin_offset = proto.Field(proto.INT32, number=2) + + +class ClassificationCategory(proto.Message): + r"""Represents a category returned from the text classifier. + + Attributes: + name (str): + The name of the category representing the document, from the + `predefined + taxonomy `__. + confidence (float): + The classifier's confidence of the category. + Number represents how certain the classifier is + that this category represents the given text. + """ + + name = proto.Field(proto.STRING, number=1) + + confidence = proto.Field(proto.FLOAT, number=2) + + +class AnalyzeSentimentRequest(proto.Message): + r"""The sentiment analysis request message. + + Attributes: + document (~.language_service.Document): + Required. Input document. + encoding_type (~.language_service.EncodingType): + The encoding type used by the API to + calculate sentence offsets for the sentence + sentiment. + """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + encoding_type = proto.Field(proto.ENUM, number=2, enum="EncodingType",) + + +class AnalyzeSentimentResponse(proto.Message): + r"""The sentiment analysis response message. + + Attributes: + document_sentiment (~.language_service.Sentiment): + The overall sentiment of the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + sentences (Sequence[~.language_service.Sentence]): + The sentiment for all the sentences in the + document. + """ + + document_sentiment = proto.Field(proto.MESSAGE, number=1, message=Sentiment,) + + language = proto.Field(proto.STRING, number=2) + + sentences = proto.RepeatedField(proto.MESSAGE, number=3, message=Sentence,) + + +class AnalyzeEntitySentimentRequest(proto.Message): + r"""The entity-level sentiment analysis request message. + + Attributes: + document (~.language_service.Document): + Required. Input document. + encoding_type (~.language_service.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + encoding_type = proto.Field(proto.ENUM, number=2, enum="EncodingType",) + + +class AnalyzeEntitySentimentResponse(proto.Message): + r"""The entity-level sentiment analysis response message. + + Attributes: + entities (Sequence[~.language_service.Entity]): + The recognized entities in the input document + with associated sentiments. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. 
See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + """ + + entities = proto.RepeatedField(proto.MESSAGE, number=1, message=Entity,) + + language = proto.Field(proto.STRING, number=2) + + +class AnalyzeEntitiesRequest(proto.Message): + r"""The entity analysis request message. + + Attributes: + document (~.language_service.Document): + Required. Input document. + encoding_type (~.language_service.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + encoding_type = proto.Field(proto.ENUM, number=2, enum="EncodingType",) + + +class AnalyzeEntitiesResponse(proto.Message): + r"""The entity analysis response message. + + Attributes: + entities (Sequence[~.language_service.Entity]): + The recognized entities in the input + document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + """ + + entities = proto.RepeatedField(proto.MESSAGE, number=1, message=Entity,) + + language = proto.Field(proto.STRING, number=2) + + +class AnalyzeSyntaxRequest(proto.Message): + r"""The syntax analysis request message. + + Attributes: + document (~.language_service.Document): + Required. Input document. + encoding_type (~.language_service.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + encoding_type = proto.Field(proto.ENUM, number=2, enum="EncodingType",) + + +class AnalyzeSyntaxResponse(proto.Message): + r"""The syntax analysis response message. + + Attributes: + sentences (Sequence[~.language_service.Sentence]): + Sentences in the input document. + tokens (Sequence[~.language_service.Token]): + Tokens, along with their syntactic + information, in the input document. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + """ + + sentences = proto.RepeatedField(proto.MESSAGE, number=1, message=Sentence,) + + tokens = proto.RepeatedField(proto.MESSAGE, number=2, message=Token,) + + language = proto.Field(proto.STRING, number=3) + + +class ClassifyTextRequest(proto.Message): + r"""The document classification request message. + + Attributes: + document (~.language_service.Document): + Required. Input document. + """ + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + +class ClassifyTextResponse(proto.Message): + r"""The document classification response message. + + Attributes: + categories (Sequence[~.language_service.ClassificationCategory]): + Categories representing the input document. + """ + + categories = proto.RepeatedField( + proto.MESSAGE, number=1, message=ClassificationCategory, + ) + + +class AnnotateTextRequest(proto.Message): + r"""The request message for the text annotation API, which can + perform multiple analysis types (sentiment, entities, and + syntax) in one call. + + Attributes: + document (~.language_service.Document): + Required. Input document. + features (~.language_service.AnnotateTextRequest.Features): + Required. The enabled features. 
+ encoding_type (~.language_service.EncodingType): + The encoding type used by the API to + calculate offsets. + """ + + class Features(proto.Message): + r"""All available features for sentiment, syntax, and semantic + analysis. Setting each one to true will enable that specific + analysis for the input. Next ID: 10 + + Attributes: + extract_syntax (bool): + Extract syntax information. + extract_entities (bool): + Extract entities. + extract_document_sentiment (bool): + Extract document-level sentiment. + extract_entity_sentiment (bool): + Extract entities and their associated + sentiment. + classify_text (bool): + Classify the full document into categories. If this is true, + the API will use the default model which classifies into a + `predefined + taxonomy `__. + """ + + extract_syntax = proto.Field(proto.BOOL, number=1) + + extract_entities = proto.Field(proto.BOOL, number=2) + + extract_document_sentiment = proto.Field(proto.BOOL, number=3) + + extract_entity_sentiment = proto.Field(proto.BOOL, number=4) + + classify_text = proto.Field(proto.BOOL, number=6) + + document = proto.Field(proto.MESSAGE, number=1, message=Document,) + + features = proto.Field(proto.MESSAGE, number=2, message=Features,) + + encoding_type = proto.Field(proto.ENUM, number=3, enum="EncodingType",) + + +class AnnotateTextResponse(proto.Message): + r"""The text annotations response message. + + Attributes: + sentences (Sequence[~.language_service.Sentence]): + Sentences in the input document. Populated if the user + enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. + tokens (Sequence[~.language_service.Token]): + Tokens, along with their syntactic information, in the input + document. Populated if the user enables + [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. + entities (Sequence[~.language_service.Entity]): + Entities, along with their semantic information, in the + input document. Populated if the user enables + [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities]. + document_sentiment (~.language_service.Sentiment): + The overall sentiment for the document. Populated if the + user enables + [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment]. + language (str): + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See + [Document.language][google.cloud.language.v1beta2.Document.language] + field for more details. + categories (Sequence[~.language_service.ClassificationCategory]): + Categories identified in the input document. 
+ """ + + sentences = proto.RepeatedField(proto.MESSAGE, number=1, message=Sentence,) + + tokens = proto.RepeatedField(proto.MESSAGE, number=2, message=Token,) + + entities = proto.RepeatedField(proto.MESSAGE, number=3, message=Entity,) + + document_sentiment = proto.Field(proto.MESSAGE, number=4, message=Sentiment,) + + language = proto.Field(proto.STRING, number=5) + + categories = proto.RepeatedField( + proto.MESSAGE, number=6, message=ClassificationCategory, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..4505b485 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/noxfile.py b/noxfile.py index 6b4fa3b0..e1a2051c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -23,14 +23,15 @@ import nox -BLACK_VERSION = "black==19.3b0" +BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -if os.path.exists("samples"): - BLACK_PATHS.append("samples") +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. @@ -38,7 +39,9 @@ def lint(session): serious code quality issues. """ session.install("flake8", BLACK_VERSION) - session.run("black", "--check", *BLACK_PATHS) + session.run( + "black", "--check", *BLACK_PATHS, + ) session.run("flake8", "google", "tests") @@ -53,10 +56,12 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run( + "black", *BLACK_PATHS, + ) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -65,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -72,6 +79,7 @@ def default(session): session.run( "py.test", "--quiet", + "--cov=google.cloud.language", "--cov=google.cloud", "--cov=tests.unit", "--cov-append", @@ -83,17 +91,21 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["2.7", "3.7"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -109,7 +121,9 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest") + session.install( + "mock", "pytest", "google-cloud-testutils", + ) session.install("-e", ".") # Run py.test against the system tests. 
@@ -119,25 +133,7 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python=["3.7"]) -def samples(session): - """Run the samples test suite.""" - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - - samples_path = "samples" - if not os.path.exists(samples_path): - session.skip("Samples not found.") - - session.install("pyyaml") - session.install("sample-tester") - session.install("-e", ".") - - session.run("sample-tester", samples_path, *session.posargs) - - -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. @@ -145,12 +141,12 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=87") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" @@ -170,3 +166,38 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. + # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/samples/snippets/api/README.rst b/samples/snippets/api/README.rst index 5f4edfd2..0d9d9451 100644 --- a/samples/snippets/api/README.rst +++ b/samples/snippets/api/README.rst @@ -14,6 +14,10 @@ This directory contains samples for Google Cloud Natural Language API. The `Goog .. _Google Cloud Natural Language API: https://cloud.google.com/natural-language/docs/ + + + + Setup ------------------------------------------------------------------------------- diff --git a/samples/snippets/api/analyze_test.py b/samples/snippets/api/analyze_test.py index b4a0db67..c797e2e3 100644 --- a/samples/snippets/api/analyze_test.py +++ b/samples/snippets/api/analyze_test.py @@ -37,8 +37,7 @@ def test_analyze_sentiment(capsys): assert sentiment["magnitude"] < 1 result = analyze.analyze_sentiment( - "cheerio, mate - I greatly admire the pallor of your visage, and your " - "angle of repose leaves little room for improvement." + "cheerio, mate - I greatly admire the pallor of your visage, and your angle of repose leaves little room for improvement." 
) sentiment = result["documentSentiment"] diff --git a/samples/snippets/api/noxfile.py b/samples/snippets/api/noxfile.py index 5660f08b..ba55d7ce 100644 --- a/samples/snippets/api/noxfile.py +++ b/samples/snippets/api/noxfile.py @@ -37,22 +37,24 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -67,12 +69,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -81,7 +83,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -136,7 +138,7 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) @@ -180,9 +182,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # diff --git a/samples/snippets/classify_text/README.rst b/samples/snippets/classify_text/README.rst index a1112f21..757debb0 100644 --- a/samples/snippets/classify_text/README.rst +++ b/samples/snippets/classify_text/README.rst @@ -18,6 +18,10 @@ This tutorial demostrates how to use the `classify_text` method to classify cont .. _Google Cloud Natural Language API: https://cloud.google.com/natural-language/docs/ + + + + Setup ------------------------------------------------------------------------------- diff --git a/samples/snippets/classify_text/classify_text_tutorial.py b/samples/snippets/classify_text/classify_text_tutorial.py index fcd5008b..9c05b83f 100644 --- a/samples/snippets/classify_text/classify_text_tutorial.py +++ b/samples/snippets/classify_text/classify_text_tutorial.py @@ -26,7 +26,7 @@ import json import os -from google.cloud import language +from google.cloud import language_v1 import numpy import six @@ -37,12 +37,12 @@ def classify(text, verbose=True): """Classify the input text into categories. 
""" - language_client = language.LanguageServiceClient() + language_client = language_v1.LanguageServiceClient() - document = language.types.Document( - content=text, type=language.enums.Document.Type.PLAIN_TEXT + document = language_v1.Document( + content=text, type_=language_v1.Document.Type.PLAIN_TEXT ) - response = language_client.classify_text(document) + response = language_client.classify_text(request={'document': document}) categories = response.categories result = {} diff --git a/samples/snippets/classify_text/noxfile.py b/samples/snippets/classify_text/noxfile.py index 5660f08b..ba55d7ce 100644 --- a/samples/snippets/classify_text/noxfile.py +++ b/samples/snippets/classify_text/noxfile.py @@ -37,22 +37,24 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -67,12 +69,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -81,7 +83,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -136,7 +138,7 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) @@ -180,9 +182,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # diff --git a/samples/snippets/cloud-client/v1/noxfile.py b/samples/snippets/cloud-client/v1/noxfile.py index 5660f08b..ba55d7ce 100644 --- a/samples/snippets/cloud-client/v1/noxfile.py +++ b/samples/snippets/cloud-client/v1/noxfile.py @@ -37,22 +37,24 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. 
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -67,12 +69,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -81,7 +83,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -136,7 +138,7 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) @@ -180,9 +182,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # diff --git a/samples/snippets/cloud-client/v1/quickstart.py b/samples/snippets/cloud-client/v1/quickstart.py index 2cf46437..4c4b06b5 100644 --- a/samples/snippets/cloud-client/v1/quickstart.py +++ b/samples/snippets/cloud-client/v1/quickstart.py @@ -19,23 +19,21 @@ def run_quickstart(): # [START language_quickstart] # Imports the Google Cloud client library # [START language_python_migration_imports] - from google.cloud import language - from google.cloud.language import enums - from google.cloud.language import types + from google.cloud import language_v1 # [END language_python_migration_imports] # Instantiates a client # [START language_python_migration_client] - client = language.LanguageServiceClient() + client = language_v1.LanguageServiceClient() # [END language_python_migration_client] # The text to analyze text = u"Hello, world!" 
- document = types.Document(content=text, type=enums.Document.Type.PLAIN_TEXT) + document = language_v1.Document(content=text, type_=language_v1.Document.Type.PLAIN_TEXT) # Detects the sentiment of the text - sentiment = client.analyze_sentiment(document=document).document_sentiment + sentiment = client.analyze_sentiment(request={'document': document}).document_sentiment print("Text: {}".format(text)) print("Sentiment: {}, {}".format(sentiment.score, sentiment.magnitude)) diff --git a/samples/snippets/cloud-client/v1/set_endpoint.py b/samples/snippets/cloud-client/v1/set_endpoint.py index 340d5180..e9ad97d3 100644 --- a/samples/snippets/cloud-client/v1/set_endpoint.py +++ b/samples/snippets/cloud-client/v1/set_endpoint.py @@ -17,21 +17,21 @@ def set_endpoint(): """Change your endpoint""" # [START language_set_endpoint] # Imports the Google Cloud client library - from google.cloud import language + from google.cloud import language_v1 client_options = {"api_endpoint": "eu-language.googleapis.com:443"} # Instantiates a client - client = language.LanguageServiceClient(client_options=client_options) + client = language_v1.LanguageServiceClient(client_options=client_options) # [END language_set_endpoint] # The text to analyze - document = language.types.Document( - content="Hello, world!", type=language.enums.Document.Type.PLAIN_TEXT + document = language_v1.Document( + content="Hello, world!", type_=language_v1.Document.Type.PLAIN_TEXT ) # Detects the sentiment of the text - sentiment = client.analyze_sentiment(document=document).document_sentiment + sentiment = client.analyze_sentiment(request={'document': document}).document_sentiment print("Sentiment: {}, {}".format(sentiment.score, sentiment.magnitude)) diff --git a/samples/snippets/generated-samples/v1/language_sentiment_text.py b/samples/snippets/generated-samples/v1/language_sentiment_text.py index c28a3665..9f975023 100644 --- a/samples/snippets/generated-samples/v1/language_sentiment_text.py +++ b/samples/snippets/generated-samples/v1/language_sentiment_text.py @@ -24,7 +24,6 @@ # [START language_sentiment_text] from google.cloud import language_v1 -from google.cloud.language_v1 import enums import six @@ -37,10 +36,10 @@ def sample_analyze_sentiment(content): if isinstance(content, six.binary_type): content = content.decode("utf-8") - type_ = enums.Document.Type.PLAIN_TEXT - document = {"type": type_, "content": content} + type_ = language_v1.Document.Type.PLAIN_TEXT + document = {"type_": type_, "content": content} - response = client.analyze_sentiment(document) + response = client.analyze_sentiment(request={'document': document}) sentiment = response.document_sentiment print("Score: {}".format(sentiment.score)) print("Magnitude: {}".format(sentiment.magnitude)) diff --git a/samples/snippets/generated-samples/v1/noxfile.py b/samples/snippets/generated-samples/v1/noxfile.py index 5660f08b..ba55d7ce 100644 --- a/samples/snippets/generated-samples/v1/noxfile.py +++ b/samples/snippets/generated-samples/v1/noxfile.py @@ -37,22 +37,24 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. 
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -67,12 +69,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -81,7 +83,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -136,7 +138,7 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) @@ -180,9 +182,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # diff --git a/samples/snippets/sentiment/noxfile.py b/samples/snippets/sentiment/noxfile.py index 5660f08b..ba55d7ce 100644 --- a/samples/snippets/sentiment/noxfile.py +++ b/samples/snippets/sentiment/noxfile.py @@ -37,22 +37,24 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -67,12 +69,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -81,7 +83,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -136,7 +138,7 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) @@ -180,9 +182,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # diff --git a/samples/snippets/sentiment/sentiment_analysis.py b/samples/snippets/sentiment/sentiment_analysis.py index aef7a658..2333bf82 100644 --- a/samples/snippets/sentiment/sentiment_analysis.py +++ b/samples/snippets/sentiment/sentiment_analysis.py @@ -17,9 +17,7 @@ # [START language_sentiment_tutorial_imports] import argparse -from google.cloud import language -from google.cloud.language import enums -from google.cloud.language import types +from google.cloud import language_v1 # [END language_sentiment_tutorial_imports] @@ -47,14 +45,14 @@ def print_result(annotations): # [START language_sentiment_tutorial_analyze_sentiment] def analyze(movie_review_filename): """Run a sentiment analysis request on text within a passed filename.""" - client = language.LanguageServiceClient() + client = language_v1.LanguageServiceClient() with open(movie_review_filename, "r") as review_file: # Instantiates a plain text document. content = review_file.read() - document = types.Document(content=content, type=enums.Document.Type.PLAIN_TEXT) - annotations = client.analyze_sentiment(document=document) + document = language_v1.Document(content=content, type_=language_v1.Document.Type.PLAIN_TEXT) + annotations = client.analyze_sentiment(request={'document': document}) # Print the results print_result(annotations) diff --git a/samples/v1/language_classify_gcs.py b/samples/v1/language_classify_gcs.py index 941640b1..a20789cc 100644 --- a/samples/v1/language_classify_gcs.py +++ b/samples/v1/language_classify_gcs.py @@ -26,8 +26,6 @@ # [START language_classify_gcs] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_classify_text(gcs_content_uri): """ @@ -44,7 +42,7 @@ def sample_classify_text(gcs_content_uri): # gcs_content_uri = 'gs://cloud-samples-data/language/classify-entertainment.txt' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: @@ -52,7 +50,7 @@ def sample_classify_text(gcs_content_uri): language = "en" document = {"gcs_content_uri": gcs_content_uri, "type": type_, "language": language} - response = client.classify_text(document) + response = client.classify_text(request = {'document': document}) # Loop through classified categories returned from the API for category in response.categories: # Get the name of the category representing the document. 
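The migrated samples on either side of this point all follow the same request-object calling convention. A condensed, runnable sketch of the classification flow, assuming `google-cloud-language>=2.0.0` and application default credentials (the input text is borrowed from the sample below):

```py
from google.cloud import language_v1

client = language_v1.LanguageServiceClient()

# Documents may be passed as dicts whose keys mirror the proto field names.
document = {
    "content": "That actor on TV makes movies in Hollywood and also stars in a variety of popular new TV shows.",
    "type_": language_v1.Document.Type.PLAIN_TEXT,
    "language": "en",
}

# Methods now take a single `request` mapping.
response = client.classify_text(request={"document": document})
for category in response.categories:
    print(category.name, category.confidence)
```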
diff --git a/samples/v1/language_classify_text.py b/samples/v1/language_classify_text.py index 52175f02..ad55d26c 100644 --- a/samples/v1/language_classify_text.py +++ b/samples/v1/language_classify_text.py @@ -26,8 +26,6 @@ # [START language_classify_text] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_classify_text(text_content): """ @@ -42,7 +40,7 @@ # text_content = 'That actor on TV makes movies in Hollywood and also stars in a variety of popular new TV shows.' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: @@ -50,7 +48,7 @@ language = "en" document = {"content": text_content, "type": type_, "language": language} - response = client.classify_text(document) + response = client.classify_text(request = {'document': document}) # Loop through classified categories returned from the API for category in response.categories: # Get the name of the category representing the document. diff --git a/samples/v1/language_entities_gcs.py b/samples/v1/language_entities_gcs.py index 790592ca..d735e885 100644 --- a/samples/v1/language_entities_gcs.py +++ b/samples/v1/language_entities_gcs.py @@ -26,8 +26,6 @@ # [START language_entities_gcs] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_analyze_entities(gcs_content_uri): """ @@ -43,7 +41,7 @@ # gcs_content_uri = 'gs://cloud-samples-data/language/entity.txt' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: @@ -52,14 +50,14 @@ language = "en" document = {"gcs_content_uri": gcs_content_uri, "type": type_, "language": language} # Available values: NONE, UTF8, UTF16, UTF32 - encoding_type = enums.EncodingType.UTF8 + encoding_type = language_v1.EncodingType.UTF8 - response = client.analyze_entities(document, encoding_type=encoding_type) + response = client.analyze_entities(request = {'document': document, 'encoding_type': encoding_type}) # Loop through entities returned from the API for entity in response.entities: print(u"Representative name for the entity: {}".format(entity.name)) # Get entity type, e.g. PERSON, LOCATION, ADDRESS, NUMBER, et al - print(u"Entity type: {}".format(enums.Entity.Type(entity.type).name)) + print(u"Entity type: {}".format(language_v1.Entity.Type(entity.type).name)) # Get the salience score associated with the entity in the [0, 1.0] range print(u"Salience score: {}".format(entity.salience)) # Loop over the metadata associated with entity. For many known entities, @@ -75,7 +73,7 @@ print(u"Mention text: {}".format(mention.text.content)) # Get the mention type, e.g.
PROPER for proper noun print( - u"Mention type: {}".format(enums.EntityMention.Type(mention.type).name) + u"Mention type: {}".format(language_v1.EntityMention.Type(mention.type).name) ) # Get the language of the text, which will be the same as diff --git a/samples/v1/language_entities_text.py b/samples/v1/language_entities_text.py index 464a313d..db2ad9e2 100644 --- a/samples/v1/language_entities_text.py +++ b/samples/v1/language_entities_text.py @@ -26,8 +26,6 @@ # [START language_entities_text] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_analyze_entities(text_content): """ @@ -42,7 +40,7 @@ def sample_analyze_entities(text_content): # text_content = 'California is a state.' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: @@ -51,16 +49,16 @@ def sample_analyze_entities(text_content): document = {"content": text_content, "type": type_, "language": language} # Available values: NONE, UTF8, UTF16, UTF32 - encoding_type = enums.EncodingType.UTF8 + encoding_type = language_v1.EncodingType.UTF8 - response = client.analyze_entities(document, encoding_type=encoding_type) + response = client.analyze_entities(request = {'document': document, 'encoding_type': encoding_type}) # Loop through entitites returned from the API for entity in response.entities: print(u"Representative name for the entity: {}".format(entity.name)) # Get entity type, e.g. PERSON, LOCATION, ADDRESS, NUMBER, et al - print(u"Entity type: {}".format(enums.Entity.Type(entity.type).name)) + print(u"Entity type: {}".format(language_v1.Entity.Type(entity.type).name)) # Get the salience score associated with the entity in the [0, 1.0] range print(u"Salience score: {}".format(entity.salience)) @@ -79,7 +77,7 @@ def sample_analyze_entities(text_content): # Get the mention type, e.g. PROPER for proper noun print( - u"Mention type: {}".format(enums.EntityMention.Type(mention.type).name) + u"Mention type: {}".format(language_v1.EntityMention.Type(mention.type).name) ) # Get the language of the text, which will be the same as diff --git a/samples/v1/language_entity_sentiment_gcs.py b/samples/v1/language_entity_sentiment_gcs.py index 9fafa737..2a4c6ff3 100644 --- a/samples/v1/language_entity_sentiment_gcs.py +++ b/samples/v1/language_entity_sentiment_gcs.py @@ -26,8 +26,6 @@ # [START language_entity_sentiment_gcs] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_analyze_entity_sentiment(gcs_content_uri): """ @@ -43,7 +41,7 @@ def sample_analyze_entity_sentiment(gcs_content_uri): # gcs_content_uri = 'gs://cloud-samples-data/language/entity-sentiment.txt' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. 
# For list of supported languages: @@ -52,14 +50,14 @@ def sample_analyze_entity_sentiment(gcs_content_uri): document = {"gcs_content_uri": gcs_content_uri, "type": type_, "language": language} # Available values: NONE, UTF8, UTF16, UTF32 - encoding_type = enums.EncodingType.UTF8 + encoding_type = language_v1.EncodingType.UTF8 - response = client.analyze_entity_sentiment(document, encoding_type=encoding_type) + response = client.analyze_entity_sentiment(request = {'document': document, 'encoding_type': encoding_type}) # Loop through entitites returned from the API for entity in response.entities: print(u"Representative name for the entity: {}".format(entity.name)) # Get entity type, e.g. PERSON, LOCATION, ADDRESS, NUMBER, et al - print(u"Entity type: {}".format(enums.Entity.Type(entity.type).name)) + print(u"Entity type: {}".format(language_v1.Entity.Type(entity.type).name)) # Get the salience score associated with the entity in the [0, 1.0] range print(u"Salience score: {}".format(entity.salience)) # Get the aggregate sentiment expressed for this entity in the provided document. @@ -79,7 +77,7 @@ def sample_analyze_entity_sentiment(gcs_content_uri): print(u"Mention text: {}".format(mention.text.content)) # Get the mention type, e.g. PROPER for proper noun print( - u"Mention type: {}".format(enums.EntityMention.Type(mention.type).name) + u"Mention type: {}".format(language_v1.EntityMention.Type(mention.type).name) ) # Get the language of the text, which will be the same as diff --git a/samples/v1/language_entity_sentiment_text.py b/samples/v1/language_entity_sentiment_text.py index 9b3d5b8a..20c9dbd8 100644 --- a/samples/v1/language_entity_sentiment_text.py +++ b/samples/v1/language_entity_sentiment_text.py @@ -26,8 +26,6 @@ # [START language_entity_sentiment_text] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_analyze_entity_sentiment(text_content): """ @@ -42,7 +40,7 @@ def sample_analyze_entity_sentiment(text_content): # text_content = 'Grapes are good. Bananas are bad.' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: @@ -51,14 +49,14 @@ def sample_analyze_entity_sentiment(text_content): document = {"content": text_content, "type": type_, "language": language} # Available values: NONE, UTF8, UTF16, UTF32 - encoding_type = enums.EncodingType.UTF8 + encoding_type = language_v1.EncodingType.UTF8 - response = client.analyze_entity_sentiment(document, encoding_type=encoding_type) + response = client.analyze_entity_sentiment(request = {'document': document, 'encoding_type': encoding_type}) # Loop through entitites returned from the API for entity in response.entities: print(u"Representative name for the entity: {}".format(entity.name)) # Get entity type, e.g. PERSON, LOCATION, ADDRESS, NUMBER, et al - print(u"Entity type: {}".format(enums.Entity.Type(entity.type).name)) + print(u"Entity type: {}".format(language_v1.Entity.Type(entity.type).name)) # Get the salience score associated with the entity in the [0, 1.0] range print(u"Salience score: {}".format(entity.salience)) # Get the aggregate sentiment expressed for this entity in the provided document. @@ -78,7 +76,7 @@ def sample_analyze_entity_sentiment(text_content): print(u"Mention text: {}".format(mention.text.content)) # Get the mention type, e.g. 
PROPER for proper noun print( - u"Mention type: {}".format(enums.EntityMention.Type(mention.type).name) + u"Mention type: {}".format(language_v1.EntityMention.Type(mention.type).name) ) # Get the language of the text, which will be the same as diff --git a/samples/v1/language_sentiment_gcs.py b/samples/v1/language_sentiment_gcs.py index 261f2f3e..68839805 100644 --- a/samples/v1/language_sentiment_gcs.py +++ b/samples/v1/language_sentiment_gcs.py @@ -26,8 +26,6 @@ # [START language_sentiment_gcs] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_analyze_sentiment(gcs_content_uri): """ @@ -43,7 +41,7 @@ def sample_analyze_sentiment(gcs_content_uri): # gcs_content_uri = 'gs://cloud-samples-data/language/sentiment-positive.txt' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: @@ -52,9 +50,9 @@ def sample_analyze_sentiment(gcs_content_uri): document = {"gcs_content_uri": gcs_content_uri, "type": type_, "language": language} # Available values: NONE, UTF8, UTF16, UTF32 - encoding_type = enums.EncodingType.UTF8 + encoding_type = language_v1.EncodingType.UTF8 - response = client.analyze_sentiment(document, encoding_type=encoding_type) + response = client.analyze_sentiment(request = {'document': document, 'encoding_type': encoding_type}) # Get overall sentiment of the input document print(u"Document sentiment score: {}".format(response.document_sentiment.score)) print( diff --git a/samples/v1/language_sentiment_text.py b/samples/v1/language_sentiment_text.py index 12f1e221..0be2b6cf 100644 --- a/samples/v1/language_sentiment_text.py +++ b/samples/v1/language_sentiment_text.py @@ -26,8 +26,6 @@ # [START language_sentiment_text] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_analyze_sentiment(text_content): """ @@ -42,7 +40,7 @@ def sample_analyze_sentiment(text_content): # text_content = 'I am so happy and joyful.' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. 
# For list of supported languages: @@ -51,9 +49,9 @@ def sample_analyze_sentiment(text_content): document = {"content": text_content, "type": type_, "language": language} # Available values: NONE, UTF8, UTF16, UTF32 - encoding_type = enums.EncodingType.UTF8 + encoding_type = language_v1.EncodingType.UTF8 - response = client.analyze_sentiment(document, encoding_type=encoding_type) + response = client.analyze_sentiment(request = {'document': document, 'encoding_type': encoding_type}) # Get overall sentiment of the input document print(u"Document sentiment score: {}".format(response.document_sentiment.score)) print( diff --git a/samples/v1/language_syntax_gcs.py b/samples/v1/language_syntax_gcs.py index 32bf2acb..e04be406 100644 --- a/samples/v1/language_syntax_gcs.py +++ b/samples/v1/language_syntax_gcs.py @@ -26,8 +26,6 @@ # [START language_syntax_gcs] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_analyze_syntax(gcs_content_uri): """ @@ -43,7 +41,7 @@ def sample_analyze_syntax(gcs_content_uri): # gcs_content_uri = 'gs://cloud-samples-data/language/syntax-sentence.txt' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: @@ -52,9 +50,9 @@ def sample_analyze_syntax(gcs_content_uri): document = {"gcs_content_uri": gcs_content_uri, "type": type_, "language": language} # Available values: NONE, UTF8, UTF16, UTF32 - encoding_type = enums.EncodingType.UTF8 + encoding_type = language_v1.EncodingType.UTF8 - response = client.analyze_syntax(document, encoding_type=encoding_type) + response = client.analyze_syntax(request = {'document': document, 'encoding_type': encoding_type}) # Loop through tokens returned from the API for token in response.tokens: # Get the text content of this token. Usually a word or punctuation. @@ -70,13 +68,13 @@ def sample_analyze_syntax(gcs_content_uri): # Get the tag, e.g. NOUN, ADJ for Adjective, et al. print( u"Part of Speech tag: {}".format( - enums.PartOfSpeech.Tag(part_of_speech.tag).name + language_v1.PartOfSpeech.Tag(part_of_speech.tag).name ) ) # Get the voice, e.g. ACTIVE or PASSIVE - print(u"Voice: {}".format(enums.PartOfSpeech.Voice(part_of_speech.voice).name)) + print(u"Voice: {}".format(language_v1.PartOfSpeech.Voice(part_of_speech.voice).name)) # Get the tense, e.g. PAST, FUTURE, PRESENT, et al. - print(u"Tense: {}".format(enums.PartOfSpeech.Tense(part_of_speech.tense).name)) + print(u"Tense: {}".format(language_v1.PartOfSpeech.Tense(part_of_speech.tense).name)) # See API reference for additional Part of Speech information available # Get the lemma of the token. 
Wikipedia lemma description # https://en.wikipedia.org/wiki/Lemma_(morphology) @@ -87,7 +85,7 @@ def sample_analyze_syntax(gcs_content_uri): dependency_edge = token.dependency_edge print(u"Head token index: {}".format(dependency_edge.head_token_index)) print( - u"Label: {}".format(enums.DependencyEdge.Label(dependency_edge.label).name) + u"Label: {}".format(language_v1.DependencyEdge.Label(dependency_edge.label).name) ) # Get the language of the text, which will be the same as diff --git a/samples/v1/language_syntax_text.py b/samples/v1/language_syntax_text.py index 29041886..9f37e92c 100644 --- a/samples/v1/language_syntax_text.py +++ b/samples/v1/language_syntax_text.py @@ -26,8 +26,6 @@ # [START language_syntax_text] from google.cloud import language_v1 -from google.cloud.language_v1 import enums - def sample_analyze_syntax(text_content): """ @@ -42,7 +40,7 @@ def sample_analyze_syntax(text_content): # text_content = 'This is a short sentence.' # Available types: PLAIN_TEXT, HTML - type_ = enums.Document.Type.PLAIN_TEXT + type_ = language_v1.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: @@ -51,9 +49,9 @@ def sample_analyze_syntax(text_content): document = {"content": text_content, "type": type_, "language": language} # Available values: NONE, UTF8, UTF16, UTF32 - encoding_type = enums.EncodingType.UTF8 + encoding_type = language_v1.EncodingType.UTF8 - response = client.analyze_syntax(document, encoding_type=encoding_type) + response = client.analyze_syntax(request = {'document': document, 'encoding_type': encoding_type}) # Loop through tokens returned from the API for token in response.tokens: # Get the text content of this token. Usually a word or punctuation. @@ -69,13 +67,13 @@ def sample_analyze_syntax(text_content): # Get the tag, e.g. NOUN, ADJ for Adjective, et al. print( u"Part of Speech tag: {}".format( - enums.PartOfSpeech.Tag(part_of_speech.tag).name + language_v1.PartOfSpeech.Tag(part_of_speech.tag).name ) ) # Get the voice, e.g. ACTIVE or PASSIVE - print(u"Voice: {}".format(enums.PartOfSpeech.Voice(part_of_speech.voice).name)) + print(u"Voice: {}".format(language_v1.PartOfSpeech.Voice(part_of_speech.voice).name)) # Get the tense, e.g. PAST, FUTURE, PRESENT, et al. - print(u"Tense: {}".format(enums.PartOfSpeech.Tense(part_of_speech.tense).name)) + print(u"Tense: {}".format(language_v1.PartOfSpeech.Tense(part_of_speech.tense).name)) # See API reference for additional Part of Speech information available # Get the lemma of the token. Wikipedia lemma description # https://en.wikipedia.org/wiki/Lemma_(morphology) @@ -86,7 +84,7 @@ def sample_analyze_syntax(text_content): dependency_edge = token.dependency_edge print(u"Head token index: {}".format(dependency_edge.head_token_index)) print( - u"Label: {}".format(enums.DependencyEdge.Label(dependency_edge.label).name) + u"Label: {}".format(language_v1.DependencyEdge.Label(dependency_edge.label).name) ) # Get the language of the text, which will be the same as diff --git a/scripts/fixup_language_v1_keywords.py b/scripts/fixup_language_v1_keywords.py new file mode 100644 index 00000000..c7c107ce --- /dev/null +++ b/scripts/fixup_language_v1_keywords.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class languageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'analyze_entities': ('document', 'encoding_type', ), + 'analyze_entity_sentiment': ('document', 'encoding_type', ), + 'analyze_sentiment': ('document', 'encoding_type', ), + 'analyze_syntax': ('document', 'encoding_type', ), + 'annotate_text': ('document', 'features', 'encoding_type', ), + 'classify_text': ('document', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=languageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the language client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/scripts/fixup_language_v1beta2_keywords.py b/scripts/fixup_language_v1beta2_keywords.py new file mode 100644 index 00000000..c7c107ce --- /dev/null +++ b/scripts/fixup_language_v1beta2_keywords.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
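The transformer above rewrites flattened positional calls into the request-object form required by 2.0.0. A quick way to see it act on a single snippet, assuming `libcst` is installed and `languageCallTransformer` from the script above is pasted into the session or made importable:

```py
import libcst as cst

# Hypothetical one-line input; ``document`` and ``encoding`` are just names here.
src = "client.analyze_sentiment(document, encoding_type=encoding)\n"

# Parse, run the transformer, and render the rewritten source.
updated = cst.parse_module(src).visit(languageCallTransformer())
print(updated.code)
# Prints something like:
# client.analyze_sentiment(request = {'document': document, 'encoding_type': encoding})
```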
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class languageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'analyze_entities': ('document', 'encoding_type', ), + 'analyze_entity_sentiment': ('document', 'encoding_type', ), + 'analyze_sentiment': ('document', 'encoding_type', ), + 'analyze_syntax': ('document', 'encoding_type', ), + 'annotate_text': ('document', 'features', 'encoding_type', ), + 'classify_text': ('document', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=languageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the language client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py index 2c11a7b5..2020b3c6 100644 --- a/setup.py +++ b/setup.py @@ -29,8 +29,9 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - 'enum34;python_version<"3.4"', + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "proto-plus >= 1.4.0", + "libcst >= 0.2.5", ] extras = {} @@ -46,7 +47,9 @@ # Only include packages under the 'google' namespace. Do not include tests, # benchmarks, etc. packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] # Determine which namespaces are needed. 
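The packaging change above swaps `setuptools.find_packages()` for the PEP 420 finder because the `google` and `google.cloud` namespace directories intentionally ship without `__init__.py` files, which the classic finder would silently skip. On setuptools >= 40.1 the same discovery can be written with the public helper; a sketch:

```py
import setuptools

# find_namespace_packages() is the public wrapper around
# PEP420PackageFinder.find(); unlike find_packages(), it also
# descends into directories that have no __init__.py.
packages = [
    package
    for package in setuptools.find_namespace_packages()
    if package.startswith("google")
]
```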
@@ -69,12 +72,10 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -83,7 +84,11 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + python_requires=">=3.6", + scripts=[ + "scripts/fixup_language_v1_keywords.py", + "scripts/fixup_language_v1beta2_keywords.py", + ], include_package_data=True, zip_safe=False, ) diff --git a/synth.metadata b/synth.metadata index 1da2f3fc..bc28899b 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,29 +3,22 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-language.git", - "sha": "2084dc18f3f495ceb753e4131ca616c17b25cf86" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b7f574bddb451d81aa222dad7dcecf3477cb97ed" + "remote": "git@github.com:/googleapis/python-language.git", + "sha": "cde50983b6d45fd0b2348eeb552404b391403bc6" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "da29da32b3a988457b49ae290112b74f14b713cc" + "sha": "0c868d49b8e05bc1f299bc773df9eb4ef9ed96e9" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "da29da32b3a988457b49ae290112b74f14b713cc" + "sha": "0c868d49b8e05bc1f299bc773df9eb4ef9ed96e9" } } ], diff --git a/synth.py b/synth.py index ee783601..d1aec55f 100644 --- a/synth.py +++ b/synth.py @@ -33,35 +33,14 @@ bazel_target=f"//google/cloud/language/{version}:language-{version}-py", include_protos=True, ) - - s.move(library / f"google/cloud/language_{version}/proto") - s.move(library / f"google/cloud/language_{version}/gapic") - s.move(library / f"tests/unit/gapic/{version}") - s.move(library / f"tests/system/gapic/{version}") - s.move(library / f"samples") + s.move(library, excludes=["docs/index.rst", "README.rst", "setup.py"]) # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=97, cov_level=100, samples=True) - -s.move(templated_files, excludes=['noxfile.py']) - -s.replace("google/cloud/**/language_service_pb2.py", -'''__doc__ = """################################################################ - # - - Represents the input to API methods.''', -'''__doc__="""Represents the input to API methods.''' -) -s.replace( - f"google/cloud/**/gapic/language_service_client.py", - r"types\.EncodingType", - "enums.EncodingType", -) +templated_files = common.py_library(cov_level=99, samples=True, microgenerator=True,) -# TODO(busunkim): Use latest sphinx after microgenerator transition -s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') +s.move(templated_files, excludes=['.coveragerc']) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/system/gapic/v1/test_system_language_service_v1.py 
b/tests/system/gapic/v1/test_system_language_service_v1.py deleted file mode 100644 index e54b9339..00000000 --- a/tests/system/gapic/v1/test_system_language_service_v1.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -from google.cloud import language_v1 -from google.cloud.language_v1 import enums -from google.cloud.language_v1.proto import language_service_pb2 - - -class TestSystemLanguageService(object): - def test_analyze_sentiment(self): - - client = language_v1.LanguageServiceClient() - content = "Hello, world!" - type_ = enums.Document.Type.PLAIN_TEXT - document = {"content": content, "type": type_} - response = client.analyze_sentiment(document) diff --git a/tests/system/gapic/v1beta2/test_system_language_service_v1beta2.py b/tests/system/gapic/v1beta2/test_system_language_service_v1beta2.py deleted file mode 100644 index 81edf7d6..00000000 --- a/tests/system/gapic/v1beta2/test_system_language_service_v1beta2.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time - -from google.cloud import language_v1beta2 -from google.cloud.language_v1beta2 import enums -from google.cloud.language_v1beta2.proto import language_service_pb2 - - -class TestSystemLanguageService(object): - def test_analyze_sentiment(self): - - client = language_v1beta2.LanguageServiceClient() - content = "Hello, world!" - type_ = enums.Document.Type.PLAIN_TEXT - document = {"content": content, "type": type_} - encoding_type = enums.EncodingType.NONE - response = client.analyze_sentiment(document, encoding_type=encoding_type) diff --git a/tests/unit/gapic/language_v1/__init__.py b/tests/unit/gapic/language_v1/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/tests/unit/gapic/language_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/language_v1/test_language_service.py b/tests/unit/gapic/language_v1/test_language_service.py new file mode 100644 index 00000000..6ccbebf7 --- /dev/null +++ b/tests/unit/gapic/language_v1/test_language_service.py @@ -0,0 +1,1771 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.language_v1.services.language_service import ( + LanguageServiceAsyncClient, +) +from google.cloud.language_v1.services.language_service import LanguageServiceClient +from google.cloud.language_v1.services.language_service import transports +from google.cloud.language_v1.types import language_service +from google.oauth2 import service_account + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LanguageServiceClient._get_default_mtls_endpoint(None) is None + assert ( + LanguageServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [LanguageServiceClient, LanguageServiceAsyncClient] +) +def test_language_service_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "language.googleapis.com:443" + + +def test_language_service_client_get_transport_class(): + transport = LanguageServiceClient.get_transport_class() + assert transport == transports.LanguageServiceGrpcTransport + + transport = 
LanguageServiceClient.get_transport_class("grpc") + assert transport == transports.LanguageServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + LanguageServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LanguageServiceClient), +) +@mock.patch.object( + LanguageServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LanguageServiceAsyncClient), +) +def test_language_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LanguageServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LanguageServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
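The endpoint checks above and below pin down how two environment variables steer the client. A hedged summary of the behavior these tests encode (any other value raises `MutualTLSChannelError` or `ValueError`, respectively):

```py
import os

# "never" / "auto" / "always": whether the client switches to the
# *.mtls.googleapis.com endpoint; anything else raises MutualTLSChannelError.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"

# "true" / "false": whether a client certificate may be used at all;
# anything else raises ValueError.
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"
```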
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + LanguageServiceClient, + transports.LanguageServiceGrpcTransport, + "grpc", + "true", + ), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + LanguageServiceClient, + transports.LanguageServiceGrpcTransport, + "grpc", + "false", + ), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + LanguageServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LanguageServiceClient), +) +@mock.patch.object( + LanguageServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LanguageServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_language_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_language_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_language_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_language_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.language_v1.services.language_service.transports.LanguageServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = LanguageServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_analyze_sentiment( + transport: str = "grpc", request_type=language_service.AnalyzeSentimentRequest +): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse( + language="language_value", + ) + + response = client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + + assert response.language == "language_value" + + +def test_analyze_sentiment_from_dict(): + test_analyze_sentiment(request_type=dict) + + +@pytest.mark.asyncio +async def test_analyze_sentiment_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnalyzeSentimentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeSentimentResponse(language="language_value",) + ) + + response = await client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
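The transport-level mocking used throughout these tests also works for unit-testing application code without touching the network. A condensed sketch that mirrors the generated tests; note that `_transport` is a private attribute, so treat this as test-suite style rather than a supported contract:

```py
import mock

from google.auth import credentials
from google.cloud.language_v1.services.language_service import LanguageServiceClient
from google.cloud.language_v1.types import language_service

client = LanguageServiceClient(credentials=credentials.AnonymousCredentials())

# Patch the transport callable so no RPC is ever sent.
with mock.patch.object(type(client._transport.analyze_sentiment), "__call__") as call:
    call.return_value = language_service.AnalyzeSentimentResponse(language="en")
    response = client.analyze_sentiment(
        request={
            "document": {
                "content": "Hello, world!",
                "type": language_service.Document.Type.PLAIN_TEXT,
            }
        }
    )

assert response.language == "en"
```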
+ assert isinstance(response, language_service.AnalyzeSentimentResponse) + + assert response.language == "language_value" + + +def test_analyze_sentiment_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_sentiment( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +def test_analyze_sentiment_flattened_error(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeSentimentResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_sentiment( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +@pytest.mark.asyncio +async def test_analyze_sentiment_flattened_error_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entities( + transport: str = "grpc", request_type=language_service.AnalyzeEntitiesRequest +): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse( + language="language_value", + ) + + response = client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + + assert response.language == "language_value" + + +def test_analyze_entities_from_dict(): + test_analyze_entities(request_type=dict) + + +@pytest.mark.asyncio +async def test_analyze_entities_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnalyzeEntitiesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeEntitiesResponse(language="language_value",) + ) + + response = await client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + + assert response.language == "language_value" + + +def test_analyze_entities_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_entities( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +def test_analyze_entities_flattened_error(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.asyncio +async def test_analyze_entities_flattened_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeEntitiesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_entities( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +@pytest.mark.asyncio +async def test_analyze_entities_flattened_error_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entity_sentiment( + transport: str = "grpc", request_type=language_service.AnalyzeEntitySentimentRequest +): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_entity_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitySentimentResponse( + language="language_value", + ) + + response = client.analyze_entity_sentiment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + + assert response.language == "language_value" + + +def test_analyze_entity_sentiment_from_dict(): + test_analyze_entity_sentiment(request_type=dict) + + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnalyzeEntitySentimentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_entity_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeEntitySentimentResponse(language="language_value",) + ) + + response = await client.analyze_entity_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + + assert response.language == "language_value" + + +def test_analyze_entity_sentiment_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_entity_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitySentimentResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_entity_sentiment( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +def test_analyze_entity_sentiment_flattened_error(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_flattened_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.analyze_entity_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitySentimentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeEntitySentimentResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_entity_sentiment( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_flattened_error_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_syntax( + transport: str = "grpc", request_type=language_service.AnalyzeSyntaxRequest +): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.analyze_syntax), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSyntaxResponse( + language="language_value", + ) + + response = client.analyze_syntax(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeSyntaxRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSyntaxResponse) + + assert response.language == "language_value" + + +def test_analyze_syntax_from_dict(): + test_analyze_syntax(request_type=dict) + + +@pytest.mark.asyncio +async def test_analyze_syntax_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnalyzeSyntaxRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_syntax), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
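Unlike the sync tests, the async variants cannot assign a bare response to `call.return_value`: the client `await`s the transport call, so the canned response is wrapped in `grpc_helpers_async.FakeUnaryUnaryCall`, which is awaitable. A minimal standalone sketch of the same pattern (event-loop setup kept Python 3.6 compatible; the names and response fields are illustrative):

```py
import asyncio

import mock
from google.api_core import grpc_helpers_async
from google.auth import credentials
from google.cloud.language_v1.services.language_service import LanguageServiceAsyncClient
from google.cloud.language_v1.types import language_service


async def main():
    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(
        type(client._client._transport.analyze_syntax), "__call__"
    ) as call:
        # The awaitable wrapper is what lets ``await client.analyze_syntax(...)`` resolve.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            language_service.AnalyzeSyntaxResponse(language="en")
        )
        response = await client.analyze_syntax(request={})
    assert response.language == "en"


asyncio.get_event_loop().run_until_complete(main())
```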
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnalyzeSyntaxResponse(language="language_value",)
+        )
+
+        response = await client.analyze_syntax(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+
+    assert response.language == "language_value"
+
+
+def test_analyze_syntax_flattened():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.analyze_syntax), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeSyntaxResponse()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_syntax(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+def test_analyze_syntax_flattened_error():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_syntax(
+            language_service.AnalyzeSyntaxRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.analyze_syntax), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnalyzeSyntaxResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_syntax(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.analyze_syntax( + language_service.AnalyzeSyntaxRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_classify_text( + transport: str = "grpc", request_type=language_service.ClassifyTextRequest +): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.classify_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ClassifyTextResponse() + + response = client.classify_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.ClassifyTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_from_dict(): + test_classify_text(request_type=dict) + + +@pytest.mark.asyncio +async def test_classify_text_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.ClassifyTextRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.classify_text), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.ClassifyTextResponse() + ) + + response = await client.classify_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.classify_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ClassifyTextResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.classify_text( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
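+        # Each mock_calls entry unpacks as a (name, args, kwargs) triple,
+        # so args[0] below is the request object handed to the stub.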
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+
+def test_classify_text_flattened_error():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.classify_text(
+            language_service.ClassifyTextRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+        )
+
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.classify_text), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.ClassifyTextResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.classify_text(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.classify_text(
+            language_service.ClassifyTextRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+        )
+
+
+def test_annotate_text(
+    transport: str = "grpc", request_type=language_service.AnnotateTextRequest
+):
+    client = LanguageServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.annotate_text), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnnotateTextResponse(
+            language="language_value",
+        )
+
+        response = client.annotate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == language_service.AnnotateTextRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, language_service.AnnotateTextResponse) + + assert response.language == "language_value" + + +def test_annotate_text_from_dict(): + test_annotate_text(request_type=dict) + + +@pytest.mark.asyncio +async def test_annotate_text_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnnotateTextRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.annotate_text), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnnotateTextResponse(language="language_value",) + ) + + response = await client.annotate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnnotateTextResponse) + + assert response.language == "language_value" + + +def test_annotate_text_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.annotate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnnotateTextResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.annotate_text( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].features == language_service.AnnotateTextRequest.Features( + extract_syntax=True + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +def test_annotate_text_flattened_error(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_text( + language_service.AnnotateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.asyncio +async def test_annotate_text_flattened_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.annotate_text), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnnotateTextResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.annotate_text(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].features == language_service.AnnotateTextRequest.Features(
+            extract_syntax=True
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.annotate_text(
+            language_service.AnnotateTextRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.LanguageServiceGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = LanguageServiceClient(
+            credentials=credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.LanguageServiceGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = LanguageServiceClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.LanguageServiceGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = LanguageServiceClient(
+            client_options={"scopes": ["1", "2"]}, transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.LanguageServiceGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    client = LanguageServiceClient(transport=transport)
+    assert client._transport is transport
+
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+ transport = transports.LanguageServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LanguageServiceGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.LanguageServiceGrpcTransport,) + + +def test_language_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.LanguageServiceTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_language_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LanguageServiceTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "analyze_sentiment", + "analyze_entities", + "analyze_entity_sentiment", + "analyze_syntax", + "classify_text", + "annotate_text", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_language_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_language_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport() + adc.assert_called_once() + + +def test_language_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
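+    # Application Default Credentials are resolved via google.auth.default();
+    # patching it lets the test assert on the requested scopes without
+    # touching real credentials.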
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + LanguageServiceClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_language_service_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.LanguageServiceGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_language_service_host_no_port(): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="language.googleapis.com" + ), + ) + assert client._transport._host == "language.googleapis.com:443" + + +def test_language_service_host_with_port(): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="language.googleapis.com:8000" + ), + ) + assert client._transport._host == "language.googleapis.com:8000" + + +def test_language_service_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.LanguageServiceGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + + +def test_language_service_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. 
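+    # Even with a pre-built channel, the transport still normalizes the
+    # host by appending the default port, as asserted below.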
+    transport = transports.LanguageServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LanguageServiceGrpcTransport,
+        transports.LanguageServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_language_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=(
+                    "https://www.googleapis.com/auth/cloud-language",
+                    "https://www.googleapis.com/auth/cloud-platform",
+                ),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LanguageServiceGrpcTransport,
+        transports.LanguageServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_language_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=(
+                    "https://www.googleapis.com/auth/cloud-language",
+                    "https://www.googleapis.com/auth/cloud-platform",
+                ),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.LanguageServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = LanguageServiceClient(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.LanguageServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = LanguageServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+
prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/language_v1beta2/__init__.py b/tests/unit/gapic/language_v1beta2/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/tests/unit/gapic/language_v1beta2/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/language_v1beta2/test_language_service.py b/tests/unit/gapic/language_v1beta2/test_language_service.py new file mode 100644 index 00000000..5b27952c --- /dev/null +++ b/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -0,0 +1,1773 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.language_v1beta2.services.language_service import ( + LanguageServiceAsyncClient, +) +from google.cloud.language_v1beta2.services.language_service import ( + LanguageServiceClient, +) +from google.cloud.language_v1beta2.services.language_service import transports +from google.cloud.language_v1beta2.types import language_service +from google.oauth2 import service_account + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert LanguageServiceClient._get_default_mtls_endpoint(None) is None
+    assert (
+        LanguageServiceClient._get_default_mtls_endpoint(api_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        LanguageServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        LanguageServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        LanguageServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        LanguageServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+    )
+
+
+@pytest.mark.parametrize(
+    "client_class", [LanguageServiceClient, LanguageServiceAsyncClient]
+)
+def test_language_service_client_from_service_account_file(client_class):
+    creds = credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json")
+        assert client._transport._credentials == creds
+
+        client = client_class.from_service_account_json("dummy/file/path.json")
+        assert client._transport._credentials == creds
+
+        assert client._transport._host == "language.googleapis.com:443"
+
+
+def test_language_service_client_get_transport_class():
+    transport = LanguageServiceClient.get_transport_class()
+    assert transport == transports.LanguageServiceGrpcTransport
+
+    transport = LanguageServiceClient.get_transport_class("grpc")
+    assert transport == transports.LanguageServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"),
+        (
+            LanguageServiceAsyncClient,
+            transports.LanguageServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+    ],
+)
+@mock.patch.object(
+    LanguageServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(LanguageServiceClient),
+)
+@mock.patch.object(
+    LanguageServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(LanguageServiceAsyncClient),
+)
+def test_language_service_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if a transport instance is provided we won't create a new one.
+    with mock.patch.object(LanguageServiceClient, "get_transport_class") as gtc:
+        transport = transport_class(credentials=credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if the transport is provided as a string we will create a new one.
+    with mock.patch.object(LanguageServiceClient, "get_transport_class") as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
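+    # Patching transport_class.__init__ to return None lets the test capture
+    # the constructor kwargs without opening a real channel.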
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
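+    # Only "true" and "false" are accepted for this variable; any other
+    # value is rejected at client construction time.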
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + LanguageServiceClient, + transports.LanguageServiceGrpcTransport, + "grpc", + "true", + ), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + LanguageServiceClient, + transports.LanguageServiceGrpcTransport, + "grpc", + "false", + ), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + LanguageServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LanguageServiceClient), +) +@mock.patch.object( + LanguageServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LanguageServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_language_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
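+    # Here the client certificate comes from ADC rather than client_options,
+    # so the test stubs SslCredentials.is_mtls and .ssl_credentials to
+    # simulate its presence or absence.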
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_language_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LanguageServiceClient, transports.LanguageServiceGrpcTransport, "grpc"), + ( + LanguageServiceAsyncClient, + transports.LanguageServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_language_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_language_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = LanguageServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_analyze_sentiment( + transport: str = "grpc", request_type=language_service.AnalyzeSentimentRequest +): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeSentimentResponse( + language="language_value", + ) + + response = client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeSentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + + assert response.language == "language_value" + + +def test_analyze_sentiment_from_dict(): + test_analyze_sentiment(request_type=dict) + + +@pytest.mark.asyncio +async def test_analyze_sentiment_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnalyzeSentimentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeSentimentResponse(language="language_value",) + ) + + response = await client.analyze_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, language_service.AnalyzeSentimentResponse)
+
+    assert response.language == "language_value"
+
+
+def test_analyze_sentiment_flattened():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.analyze_sentiment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeSentimentResponse()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_sentiment(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+def test_analyze_sentiment_flattened_error():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_sentiment(
+            language_service.AnalyzeSentimentRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+@pytest.mark.asyncio
+async def test_analyze_sentiment_flattened_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.analyze_sentiment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnalyzeSentimentResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_sentiment(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+@pytest.mark.asyncio
+async def test_analyze_sentiment_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entities( + transport: str = "grpc", request_type=language_service.AnalyzeEntitiesRequest +): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse( + language="language_value", + ) + + response = client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + + assert response.language == "language_value" + + +def test_analyze_entities_from_dict(): + test_analyze_entities(request_type=dict) + + +@pytest.mark.asyncio +async def test_analyze_entities_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnalyzeEntitiesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeEntitiesResponse(language="language_value",) + ) + + response = await client.analyze_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitiesResponse) + + assert response.language == "language_value" + + +def test_analyze_entities_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitiesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_entities( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+def test_analyze_entities_flattened_error():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_entities(
+            language_service.AnalyzeEntitiesRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+@pytest.mark.asyncio
+async def test_analyze_entities_flattened_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.analyze_entities), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnalyzeEntitiesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_entities(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+@pytest.mark.asyncio
+async def test_analyze_entities_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.analyze_entities(
+            language_service.AnalyzeEntitiesRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+def test_analyze_entity_sentiment(
+    transport: str = "grpc", request_type=language_service.AnalyzeEntitySentimentRequest
+):
+    client = LanguageServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.analyze_entity_sentiment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeEntitySentimentResponse(
+            language="language_value",
+        )
+
+        response = client.analyze_entity_sentiment(request)
+
+        # Establish that the underlying gRPC stub method was called.
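+        # Exactly one RPC should have been issued for the single method call.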
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + + assert response.language == "language_value" + + +def test_analyze_entity_sentiment_from_dict(): + test_analyze_entity_sentiment(request_type=dict) + + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnalyzeEntitySentimentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.analyze_entity_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnalyzeEntitySentimentResponse(language="language_value",) + ) + + response = await client.analyze_entity_sentiment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + + assert response.language == "language_value" + + +def test_analyze_entity_sentiment_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.analyze_entity_sentiment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnalyzeEntitySentimentResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_entity_sentiment( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +def test_analyze_entity_sentiment_flattened_error(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.asyncio +async def test_analyze_entity_sentiment_flattened_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client._client._transport.analyze_entity_sentiment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnalyzeEntitySentimentResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_entity_sentiment(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+@pytest.mark.asyncio
+async def test_analyze_entity_sentiment_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.analyze_entity_sentiment(
+            language_service.AnalyzeEntitySentimentRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+def test_analyze_syntax(
+    transport: str = "grpc", request_type=language_service.AnalyzeSyntaxRequest
+):
+    client = LanguageServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.analyze_syntax), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeSyntaxResponse(
+            language="language_value",
+        )
+
+        response = client.analyze_syntax(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == language_service.AnalyzeSyntaxRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+
+    assert response.language == "language_value"
+
+
+def test_analyze_syntax_from_dict():
+    test_analyze_syntax(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_async(transport: str = "grpc_asyncio"):
+    client = LanguageServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = language_service.AnalyzeSyntaxRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.analyze_syntax), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnalyzeSyntaxResponse(language="language_value",)
+        )
+
+        response = await client.analyze_syntax(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, language_service.AnalyzeSyntaxResponse)
+
+    assert response.language == "language_value"
+
+
+def test_analyze_syntax_flattened():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.analyze_syntax), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnalyzeSyntaxResponse()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.analyze_syntax(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+def test_analyze_syntax_flattened_error():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.analyze_syntax(
+            language_service.AnalyzeSyntaxRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.analyze_syntax), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnalyzeSyntaxResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.analyze_syntax(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+@pytest.mark.asyncio
+async def test_analyze_syntax_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
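+    # The ValueError is raised client-side, before any RPC is attempted.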
+ with pytest.raises(ValueError): + await client.analyze_syntax( + language_service.AnalyzeSyntaxRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_classify_text( + transport: str = "grpc", request_type=language_service.ClassifyTextRequest +): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.classify_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ClassifyTextResponse() + + response = client.classify_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.ClassifyTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_from_dict(): + test_classify_text(request_type=dict) + + +@pytest.mark.asyncio +async def test_classify_text_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.ClassifyTextRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.classify_text), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.ClassifyTextResponse() + ) + + response = await client.classify_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.classify_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.ClassifyTextResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.classify_text( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
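+        # The flattened keyword arguments should have been coalesced into a
+        # single request object.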
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+
+def test_classify_text_flattened_error():
+    client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.classify_text(
+            language_service.ClassifyTextRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+        )
+
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.classify_text), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.ClassifyTextResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.classify_text(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+
+@pytest.mark.asyncio
+async def test_classify_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.classify_text(
+            language_service.ClassifyTextRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+        )
+
+
+def test_annotate_text(
+    transport: str = "grpc", request_type=language_service.AnnotateTextRequest
+):
+    client = LanguageServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.annotate_text), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = language_service.AnnotateTextResponse(
+            language="language_value",
+        )
+
+        response = client.annotate_text(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == language_service.AnnotateTextRequest()
+
+    # Establish that the response is the type that we expect.
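+    # The 2.0 surface returns proto-plus message types rather than raw
+    # protobuf messages.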
+ assert isinstance(response, language_service.AnnotateTextResponse) + + assert response.language == "language_value" + + +def test_annotate_text_from_dict(): + test_annotate_text(request_type=dict) + + +@pytest.mark.asyncio +async def test_annotate_text_async(transport: str = "grpc_asyncio"): + client = LanguageServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = language_service.AnnotateTextRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.annotate_text), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + language_service.AnnotateTextResponse(language="language_value",) + ) + + response = await client.annotate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnnotateTextResponse) + + assert response.language == "language_value" + + +def test_annotate_text_flattened(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.annotate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = language_service.AnnotateTextResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.annotate_text( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].document == language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ) + + assert args[0].features == language_service.AnnotateTextRequest.Features( + extract_syntax=True + ) + + assert args[0].encoding_type == language_service.EncodingType.UTF8 + + +def test_annotate_text_flattened_error(): + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_text( + language_service.AnnotateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + +@pytest.mark.asyncio +async def test_annotate_text_flattened_async(): + client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.annotate_text), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            language_service.AnnotateTextResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.annotate_text(
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].document == language_service.Document(
+            type_=language_service.Document.Type.PLAIN_TEXT
+        )
+
+        assert args[0].features == language_service.AnnotateTextRequest.Features(
+            extract_syntax=True
+        )
+
+        assert args[0].encoding_type == language_service.EncodingType.UTF8
+
+
+@pytest.mark.asyncio
+async def test_annotate_text_flattened_error_async():
+    client = LanguageServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.annotate_text(
+            language_service.AnnotateTextRequest(),
+            document=language_service.Document(
+                type_=language_service.Document.Type.PLAIN_TEXT
+            ),
+            features=language_service.AnnotateTextRequest.Features(extract_syntax=True),
+            encoding_type=language_service.EncodingType.UTF8,
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.LanguageServiceGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = LanguageServiceClient(
+            credentials=credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.LanguageServiceGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = LanguageServiceClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.LanguageServiceGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = LanguageServiceClient(
+            client_options={"scopes": ["1", "2"]}, transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.LanguageServiceGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    client = LanguageServiceClient(transport=transport)
+    assert client._transport is transport
+
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
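+    # Both the sync and async transports expose the channel they wrap via
+    # the grpc_channel property.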
+ transport = transports.LanguageServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LanguageServiceGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LanguageServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.LanguageServiceGrpcTransport,) + + +def test_language_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.LanguageServiceTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_language_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LanguageServiceTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "analyze_sentiment", + "analyze_entities", + "analyze_entity_sentiment", + "analyze_syntax", + "classify_text", + "annotate_text", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_language_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_language_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.LanguageServiceTransport() + adc.assert_called_once() + + +def test_language_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
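+    # google.auth.default() performs the Application Default Credentials
+    # (ADC) lookup.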
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + LanguageServiceClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_language_service_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.LanguageServiceGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_language_service_host_no_port(): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="language.googleapis.com" + ), + ) + assert client._transport._host == "language.googleapis.com:443" + + +def test_language_service_host_with_port(): + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="language.googleapis.com:8000" + ), + ) + assert client._transport._host == "language.googleapis.com:8000" + + +def test_language_service_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.LanguageServiceGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + + +def test_language_service_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. 
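+    # A caller-supplied channel should be adopted as-is rather than a new
+    # one being created.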
+    transport = transports.LanguageServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LanguageServiceGrpcTransport,
+        transports.LanguageServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_language_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=(
+                    "https://www.googleapis.com/auth/cloud-language",
+                    "https://www.googleapis.com/auth/cloud-platform",
+                ),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.LanguageServiceGrpcTransport,
+        transports.LanguageServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_language_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=(
+                    "https://www.googleapis.com/auth/cloud-language",
+                    "https://www.googleapis.com/auth/cloud-platform",
+                ),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.LanguageServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = LanguageServiceClient(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.LanguageServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = LanguageServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
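+        # Constructing the transport class directly should forward the
+        # custom client_info as well.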
prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/v1/test_language_service_client_v1.py b/tests/unit/gapic/v1/test_language_service_client_v1.py deleted file mode 100644 index 8d8362ab..00000000 --- a/tests/unit/gapic/v1/test_language_service_client_v1.py +++ /dev/null @@ -1,310 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import language_v1 -from google.cloud.language_v1.proto import language_service_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestLanguageServiceClient(object): - def test_analyze_sentiment(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnalyzeSentimentResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.analyze_sentiment(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnalyzeSentimentRequest( - document=document - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_analyze_sentiment_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - client.analyze_sentiment(document) - - def test_analyze_entities(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnalyzeEntitiesResponse( - **expected_response - ) - - # Mock the API response - 
channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.analyze_entities(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnalyzeEntitiesRequest( - document=document - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_analyze_entities_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - client.analyze_entities(document) - - def test_analyze_entity_sentiment(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnalyzeEntitySentimentResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.analyze_entity_sentiment(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnalyzeEntitySentimentRequest( - document=document - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_analyze_entity_sentiment_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - client.analyze_entity_sentiment(document) - - def test_analyze_syntax(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnalyzeSyntaxResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.analyze_syntax(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnalyzeSyntaxRequest(document=document) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_analyze_syntax_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - 
client.analyze_syntax(document) - - def test_classify_text(self): - # Setup Expected Response - expected_response = {} - expected_response = language_service_pb2.ClassifyTextResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.classify_text(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.ClassifyTextRequest(document=document) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_classify_text_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - client.classify_text(document) - - def test_annotate_text(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnnotateTextResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup Request - document = {} - features = {} - - response = client.annotate_text(document, features) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnnotateTextRequest( - document=document, features=features - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_annotate_text_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1.LanguageServiceClient() - - # Setup request - document = {} - features = {} - - with pytest.raises(CustomException): - client.annotate_text(document, features) diff --git a/tests/unit/gapic/v1beta2/test_language_service_client_v1beta2.py b/tests/unit/gapic/v1beta2/test_language_service_client_v1beta2.py deleted file mode 100644 index 548357be..00000000 --- a/tests/unit/gapic/v1beta2/test_language_service_client_v1beta2.py +++ /dev/null @@ -1,310 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import language_v1beta2 -from google.cloud.language_v1beta2.proto import language_service_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestLanguageServiceClient(object): - def test_analyze_sentiment(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnalyzeSentimentResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.analyze_sentiment(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnalyzeSentimentRequest( - document=document - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_analyze_sentiment_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - client.analyze_sentiment(document) - - def test_analyze_entities(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnalyzeEntitiesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.analyze_entities(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnalyzeEntitiesRequest( - document=document - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_analyze_entities_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # 
Setup request - document = {} - - with pytest.raises(CustomException): - client.analyze_entities(document) - - def test_analyze_entity_sentiment(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnalyzeEntitySentimentResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.analyze_entity_sentiment(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnalyzeEntitySentimentRequest( - document=document - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_analyze_entity_sentiment_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - client.analyze_entity_sentiment(document) - - def test_analyze_syntax(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnalyzeSyntaxResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.analyze_syntax(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnalyzeSyntaxRequest(document=document) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_analyze_syntax_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - client.analyze_syntax(document) - - def test_classify_text(self): - # Setup Expected Response - expected_response = {} - expected_response = language_service_pb2.ClassifyTextResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup Request - document = {} - - response = client.classify_text(document) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.ClassifyTextRequest(document=document) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_classify_text_exception(self): - # Mock the API response 
- channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup request - document = {} - - with pytest.raises(CustomException): - client.classify_text(document) - - def test_annotate_text(self): - # Setup Expected Response - language = "language-1613589672" - expected_response = {"language": language} - expected_response = language_service_pb2.AnnotateTextResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup Request - document = {} - features = {} - - response = client.annotate_text(document, features) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = language_service_pb2.AnnotateTextRequest( - document=document, features=features - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_annotate_text_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = language_v1beta2.LanguageServiceClient() - - # Setup request - document = {} - features = {} - - with pytest.raises(CustomException): - client.annotate_text(document, features)