diff --git a/.flake8 b/.flake8 index ed931638..29227d4c 100644 --- a/.flake8 +++ b/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. + **/.nox/** __pycache__, .git, *.pyc, diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 00000000..fc281c05 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.gitignore b/.gitignore index b9daa52f..b4243ced 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 56b72c82..3b4c35c7 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-language +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-language" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 11181078..8ea6c422 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-language/.kokoro/build.sh" +} + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 00000000..2a7db027 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-language + +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh new file mode 100755 index 00000000..cf5de74c --- /dev/null +++ b/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index d108605d..801c16f4 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewinds the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-language # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. 
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. 
\n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..32302e48 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.0 + hooks: + - id: flake8 diff --git a/.trampolinerc b/.trampolinerc index 995ee291..383b6ec8 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index d7730567..64cb52b3 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: -- PEP8 compliance, with exceptions defined in the linter configuration. + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -111,6 +120,16 @@ Coding Style should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + Exceptions to PEP8: - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for @@ -123,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. 
For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local @@ -192,25 +216,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-language/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/LICENSE b/LICENSE index a8ee855d..d6456956 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d12..e783f4c6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/UPGRADING.md b/UPGRADING.md index 61fdb3f6..ea65e2bc 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -13,10 +13,10 @@ The 2.0.0 release requires Python 3.6+. > **WARNING**: Breaking change Methods expect request objects. We provide a script that will convert most common use cases. -* Install the library +* Install the library and `libcst`. ```py -python3 -m pip install google-cloud-language +python3 -m pip install google-cloud-language[libcst] ``` * The script `fixup_language_v1_keywords.py` is shipped with the library. It expects @@ -54,7 +54,7 @@ In `google-cloud-language<2.0.0`, parameters required by the API were positional retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, - ): + ): ``` In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional. @@ -84,14 +84,14 @@ Both of these calls are valid: response = client.analyze_sentiment( request={ "document": document, - "encoding_type": encoding_type + "encoding_type": encoding_type } ) ``` ```py response = client.analyze_sentiment( - document=document, + document=document, encoding_type=encoding_type ) # Make an API request. ``` @@ -102,7 +102,7 @@ will result in an error. 
```py response = client.analyze_sentiment( request={ - "document": document + "document": document }, encoding_type=encoding_type ) @@ -137,4 +137,4 @@ this path manually. ```py project = 'my-project' -project_path = f'projects/{project}' \ No newline at end of file +project_path = f'projects/{project}' diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf229..bcd37bbd 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/google/cloud/language_v1/gapic/__init__.py b/google/cloud/language_v1/gapic/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/language_v1/gapic/enums.py b/google/cloud/language_v1/gapic/enums.py deleted file mode 100644 index 28fefea5..00000000 --- a/google/cloud/language_v1/gapic/enums.py +++ /dev/null @@ -1,593 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class EncodingType(enum.IntEnum): - """ - Represents the text encoding that the caller uses to process the - output. Providing an ``EncodingType`` is recommended because the API - provides the beginning offsets for various outputs, such as tokens and - mentions, and languages that natively use different text encodings may - access offsets differently. - - Attributes: - NONE (int): If ``EncodingType`` is not specified, encoding-dependent information - (such as ``begin_offset``) will be set at ``-1``. - UTF8 (int): Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-8 encoding of the input. C++ and Go are - examples of languages that use this encoding natively. - UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-16 encoding of the input. Java and - JavaScript are examples of languages that use this encoding natively. - UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-32 encoding of the input. Python is an - example of a language that uses this encoding natively. - """ - - NONE = 0 - UTF8 = 1 - UTF16 = 2 - UTF32 = 3 - - -class DependencyEdge(object): - class Label(enum.IntEnum): - """ - The parse label enum for the token. 
- - Attributes: - UNKNOWN (int): Unknown - ABBREV (int): Abbreviation modifier - ACOMP (int): Adjectival complement - ADVCL (int): Adverbial clause modifier - ADVMOD (int): Adverbial modifier - AMOD (int): Adjectival modifier of an NP - APPOS (int): Appositional modifier of an NP - ATTR (int): Attribute dependent of a copular verb - AUX (int): Auxiliary (non-main) verb - AUXPASS (int): Passive auxiliary - CC (int): Coordinating conjunction - CCOMP (int): Clausal complement of a verb or adjective - CONJ (int): Conjunct - CSUBJ (int): Clausal subject - CSUBJPASS (int): Clausal passive subject - DEP (int): Dependency (unable to determine) - DET (int): Determiner - DISCOURSE (int): Discourse - DOBJ (int): Direct object - EXPL (int): Expletive - GOESWITH (int): Goes with (part of a word in a text not well edited) - IOBJ (int): Indirect object - MARK (int): Marker (word introducing a subordinate clause) - MWE (int): Multi-word expression - MWV (int): Multi-word verbal expression - NEG (int): Negation modifier - NN (int): Noun compound modifier - NPADVMOD (int): Noun phrase used as an adverbial modifier - NSUBJ (int): Nominal subject - NSUBJPASS (int): Passive nominal subject - NUM (int): Numeric modifier of a noun - NUMBER (int): Element of compound number - P (int): Punctuation mark - PARATAXIS (int): Parataxis relation - PARTMOD (int): Participial modifier - PCOMP (int): The complement of a preposition is a clause - POBJ (int): Object of a preposition - POSS (int): Possession modifier - POSTNEG (int): Postverbal negative particle - PRECOMP (int): Predicate complement - PRECONJ (int): Preconjunt - PREDET (int): Predeterminer - PREF (int): Prefix - PREP (int): Prepositional modifier - PRONL (int): The relationship between a verb and verbal morpheme - PRT (int): Particle - PS (int): Associative or possessive marker - QUANTMOD (int): Quantifier phrase modifier - RCMOD (int): Relative clause modifier - RCMODREL (int): Complementizer in relative clause - RDROP (int): Ellipsis without a preceding predicate - REF (int): Referent - REMNANT (int): Remnant - REPARANDUM (int): Reparandum - ROOT (int): Root - SNUM (int): Suffix specifying a unit of number - SUFF (int): Suffix - TMOD (int): Temporal modifier - TOPIC (int): Topic marker - VMOD (int): Clause headed by an infinite form of the verb that modifies a noun - VOCATIVE (int): Vocative - XCOMP (int): Open clausal complement - SUFFIX (int): Name suffix - TITLE (int): Name title - ADVPHMOD (int): Adverbial phrase modifier - AUXCAUS (int): Causative auxiliary - AUXVV (int): Helper auxiliary - DTMOD (int): Rentaishi (Prenominal modifier) - FOREIGN (int): Foreign words - KW (int): Keyword - LIST (int): List for chains of comparable items - NOMC (int): Nominalized clause - NOMCSUBJ (int): Nominalized clausal subject - NOMCSUBJPASS (int): Nominalized clausal passive - NUMC (int): Compound of numeric modifier - COP (int): Copula - DISLOCATED (int): Dislocated relation (for fronted/topicalized elements) - ASP (int): Aspect marker - GMOD (int): Genitive modifier - GOBJ (int): Genitive object - INFMOD (int): Infinitival modifier - MES (int): Measure - NCOMP (int): Nominal complement of a noun - """ - - UNKNOWN = 0 - ABBREV = 1 - ACOMP = 2 - ADVCL = 3 - ADVMOD = 4 - AMOD = 5 - APPOS = 6 - ATTR = 7 - AUX = 8 - AUXPASS = 9 - CC = 10 - CCOMP = 11 - CONJ = 12 - CSUBJ = 13 - CSUBJPASS = 14 - DEP = 15 - DET = 16 - DISCOURSE = 17 - DOBJ = 18 - EXPL = 19 - GOESWITH = 20 - IOBJ = 21 - MARK = 22 - MWE = 23 - MWV = 24 - NEG = 25 - NN = 26 - NPADVMOD = 27 - NSUBJ = 28 - 
NSUBJPASS = 29 - NUM = 30 - NUMBER = 31 - P = 32 - PARATAXIS = 33 - PARTMOD = 34 - PCOMP = 35 - POBJ = 36 - POSS = 37 - POSTNEG = 38 - PRECOMP = 39 - PRECONJ = 40 - PREDET = 41 - PREF = 42 - PREP = 43 - PRONL = 44 - PRT = 45 - PS = 46 - QUANTMOD = 47 - RCMOD = 48 - RCMODREL = 49 - RDROP = 50 - REF = 51 - REMNANT = 52 - REPARANDUM = 53 - ROOT = 54 - SNUM = 55 - SUFF = 56 - TMOD = 57 - TOPIC = 58 - VMOD = 59 - VOCATIVE = 60 - XCOMP = 61 - SUFFIX = 62 - TITLE = 63 - ADVPHMOD = 64 - AUXCAUS = 65 - AUXVV = 66 - DTMOD = 67 - FOREIGN = 68 - KW = 69 - LIST = 70 - NOMC = 71 - NOMCSUBJ = 72 - NOMCSUBJPASS = 73 - NUMC = 74 - COP = 75 - DISLOCATED = 76 - ASP = 77 - GMOD = 78 - GOBJ = 79 - INFMOD = 80 - MES = 81 - NCOMP = 82 - - -class Document(object): - class Type(enum.IntEnum): - """ - The document types enum. - - Attributes: - TYPE_UNSPECIFIED (int): The content type is not specified. - PLAIN_TEXT (int): Plain text - HTML (int): HTML - """ - - TYPE_UNSPECIFIED = 0 - PLAIN_TEXT = 1 - HTML = 2 - - -class Entity(object): - class Type(enum.IntEnum): - """ - The type of the entity. For most entity types, the associated - metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph MID - (``mid``). The table below lists the associated fields for entities that - have different metadata. - - Attributes: - UNKNOWN (int): Unknown - PERSON (int): Person - LOCATION (int): Location - ORGANIZATION (int): Organization - EVENT (int): Event - WORK_OF_ART (int): Artwork - CONSUMER_GOOD (int): Consumer product - OTHER (int): Other types of entities - PHONE_NUMBER (int): Phone number The metadata lists the phone number, formatted - according to local convention, plus whichever additional elements appear - in the text: - - .. raw:: html - -
- • number – the actual number, broken down into sections as per local convention
- • national_prefix – country code, if detected
- • area_code – region or area code, if detected
- • extension – phone extension (to be dialed after connection), if detected
- ADDRESS (int): Address The metadata identifies the street number and locality plus
- whichever additional elements appear in the text:
- .. raw:: html
- • street_number – street number
- • locality – city or town
- • street_name – street/route name, if detected
- • postal_code – postal code, if detected
- • country – country, if detected
- • broad_region – administrative area, such as the state, if detected
- • narrow_region – smaller administrative area, such as county, if detected
- • sublocality – used in Asian addresses to demark a district within a city, if detected
- DATE (int): Date The metadata identifies the components of the date:
- • year – four digit year, if detected
- • month – two digit month number, if detected
- • day – two digit day number, if detected
- NUMBER (int): Number The metadata is the number itself.
- PRICE (int): Price
    - The metadata identifies the value and currency. - """ - - UNKNOWN = 0 - PERSON = 1 - LOCATION = 2 - ORGANIZATION = 3 - EVENT = 4 - WORK_OF_ART = 5 - CONSUMER_GOOD = 6 - OTHER = 7 - PHONE_NUMBER = 9 - ADDRESS = 10 - DATE = 11 - NUMBER = 12 - PRICE = 13 - - -class EntityMention(object): - class Type(enum.IntEnum): - """ - The supported types of mentions. - - Attributes: - TYPE_UNKNOWN (int): Unknown - PROPER (int): Proper name - COMMON (int): Common noun (or noun compound) - """ - - TYPE_UNKNOWN = 0 - PROPER = 1 - COMMON = 2 - - -class PartOfSpeech(object): - class Aspect(enum.IntEnum): - """ - The characteristic of a verb that expresses time flow during an event. - - Attributes: - ASPECT_UNKNOWN (int): Aspect is not applicable in the analyzed language or is not predicted. - PERFECTIVE (int): Perfective - IMPERFECTIVE (int): Imperfective - PROGRESSIVE (int): Progressive - """ - - ASPECT_UNKNOWN = 0 - PERFECTIVE = 1 - IMPERFECTIVE = 2 - PROGRESSIVE = 3 - - class Case(enum.IntEnum): - """ - The grammatical function performed by a noun or pronoun in a phrase, - clause, or sentence. In some languages, other parts of speech, such as - adjective and determiner, take case inflection in agreement with the noun. - - Attributes: - CASE_UNKNOWN (int): Case is not applicable in the analyzed language or is not predicted. - ACCUSATIVE (int): Accusative - ADVERBIAL (int): Adverbial - COMPLEMENTIVE (int): Complementive - DATIVE (int): Dative - GENITIVE (int): Genitive - INSTRUMENTAL (int): Instrumental - LOCATIVE (int): Locative - NOMINATIVE (int): Nominative - OBLIQUE (int): Oblique - PARTITIVE (int): Partitive - PREPOSITIONAL (int): Prepositional - REFLEXIVE_CASE (int): Reflexive - RELATIVE_CASE (int): Relative - VOCATIVE (int): Vocative - """ - - CASE_UNKNOWN = 0 - ACCUSATIVE = 1 - ADVERBIAL = 2 - COMPLEMENTIVE = 3 - DATIVE = 4 - GENITIVE = 5 - INSTRUMENTAL = 6 - LOCATIVE = 7 - NOMINATIVE = 8 - OBLIQUE = 9 - PARTITIVE = 10 - PREPOSITIONAL = 11 - REFLEXIVE_CASE = 12 - RELATIVE_CASE = 13 - VOCATIVE = 14 - - class Form(enum.IntEnum): - """ - Depending on the language, Form can be categorizing different forms of - verbs, adjectives, adverbs, etc. For example, categorizing inflected - endings of verbs and adjectives or distinguishing between short and long - forms of adjectives and participles - - Attributes: - FORM_UNKNOWN (int): Form is not applicable in the analyzed language or is not predicted. - ADNOMIAL (int): Adnomial - AUXILIARY (int): Auxiliary - COMPLEMENTIZER (int): Complementizer - FINAL_ENDING (int): Final ending - GERUND (int): Gerund - REALIS (int): Realis - IRREALIS (int): Irrealis - SHORT (int): Short form - LONG (int): Long form - ORDER (int): Order form - SPECIFIC (int): Specific form - """ - - FORM_UNKNOWN = 0 - ADNOMIAL = 1 - AUXILIARY = 2 - COMPLEMENTIZER = 3 - FINAL_ENDING = 4 - GERUND = 5 - REALIS = 6 - IRREALIS = 7 - SHORT = 8 - LONG = 9 - ORDER = 10 - SPECIFIC = 11 - - class Gender(enum.IntEnum): - """ - Gender classes of nouns reflected in the behaviour of associated words. - - Attributes: - GENDER_UNKNOWN (int): Gender is not applicable in the analyzed language or is not predicted. - FEMININE (int): Feminine - MASCULINE (int): Masculine - NEUTER (int): Neuter - """ - - GENDER_UNKNOWN = 0 - FEMININE = 1 - MASCULINE = 2 - NEUTER = 3 - - class Mood(enum.IntEnum): - """ - The grammatical feature of verbs, used for showing modality and attitude. - - Attributes: - MOOD_UNKNOWN (int): Mood is not applicable in the analyzed language or is not predicted. 
- CONDITIONAL_MOOD (int): Conditional - IMPERATIVE (int): Imperative - INDICATIVE (int): Indicative - INTERROGATIVE (int): Interrogative - JUSSIVE (int): Jussive - SUBJUNCTIVE (int): Subjunctive - """ - - MOOD_UNKNOWN = 0 - CONDITIONAL_MOOD = 1 - IMPERATIVE = 2 - INDICATIVE = 3 - INTERROGATIVE = 4 - JUSSIVE = 5 - SUBJUNCTIVE = 6 - - class Number(enum.IntEnum): - """ - Count distinctions. - - Attributes: - NUMBER_UNKNOWN (int): Number is not applicable in the analyzed language or is not predicted. - SINGULAR (int): Singular - PLURAL (int): Plural - DUAL (int): Dual - """ - - NUMBER_UNKNOWN = 0 - SINGULAR = 1 - PLURAL = 2 - DUAL = 3 - - class Person(enum.IntEnum): - """ - The distinction between the speaker, second person, third person, etc. - - Attributes: - PERSON_UNKNOWN (int): Person is not applicable in the analyzed language or is not predicted. - FIRST (int): First - SECOND (int): Second - THIRD (int): Third - REFLEXIVE_PERSON (int): Reflexive - """ - - PERSON_UNKNOWN = 0 - FIRST = 1 - SECOND = 2 - THIRD = 3 - REFLEXIVE_PERSON = 4 - - class Proper(enum.IntEnum): - """ - This category shows if the token is part of a proper name. - - Attributes: - PROPER_UNKNOWN (int): Proper is not applicable in the analyzed language or is not predicted. - PROPER (int): Proper - NOT_PROPER (int): Not proper - """ - - PROPER_UNKNOWN = 0 - PROPER = 1 - NOT_PROPER = 2 - - class Reciprocity(enum.IntEnum): - """ - Reciprocal features of a pronoun. - - Attributes: - RECIPROCITY_UNKNOWN (int): Reciprocity is not applicable in the analyzed language or is not - predicted. - RECIPROCAL (int): Reciprocal - NON_RECIPROCAL (int): Non-reciprocal - """ - - RECIPROCITY_UNKNOWN = 0 - RECIPROCAL = 1 - NON_RECIPROCAL = 2 - - class Tag(enum.IntEnum): - """ - The part of speech tags enum. - - Attributes: - UNKNOWN (int): Unknown - ADJ (int): Adjective - ADP (int): Adposition (preposition and postposition) - ADV (int): Adverb - CONJ (int): Conjunction - DET (int): Determiner - NOUN (int): Noun (common and proper) - NUM (int): Cardinal number - PRON (int): Pronoun - PRT (int): Particle or other function word - PUNCT (int): Punctuation - VERB (int): Verb (all tenses and modes) - X (int): Other: foreign words, typos, abbreviations - AFFIX (int): Affix - """ - - UNKNOWN = 0 - ADJ = 1 - ADP = 2 - ADV = 3 - CONJ = 4 - DET = 5 - NOUN = 6 - NUM = 7 - PRON = 8 - PRT = 9 - PUNCT = 10 - VERB = 11 - X = 12 - AFFIX = 13 - - class Tense(enum.IntEnum): - """ - Time reference. - - Attributes: - TENSE_UNKNOWN (int): Tense is not applicable in the analyzed language or is not predicted. - CONDITIONAL_TENSE (int): Conditional - FUTURE (int): Future - PAST (int): Past - PRESENT (int): Present - IMPERFECT (int): Imperfect - PLUPERFECT (int): Pluperfect - """ - - TENSE_UNKNOWN = 0 - CONDITIONAL_TENSE = 1 - FUTURE = 2 - PAST = 3 - PRESENT = 4 - IMPERFECT = 5 - PLUPERFECT = 6 - - class Voice(enum.IntEnum): - """ - The relationship between the action that a verb expresses and the - participants identified by its arguments. - - Attributes: - VOICE_UNKNOWN (int): Voice is not applicable in the analyzed language or is not predicted. 
- ACTIVE (int): Active - CAUSATIVE (int): Causative - PASSIVE (int): Passive - """ - - VOICE_UNKNOWN = 0 - ACTIVE = 1 - CAUSATIVE = 2 - PASSIVE = 3 diff --git a/google/cloud/language_v1/gapic/language_service_client.py b/google/cloud/language_v1/gapic/language_service_client.py deleted file mode 100644 index 4dba1b05..00000000 --- a/google/cloud/language_v1/gapic/language_service_client.py +++ /dev/null @@ -1,578 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.cloud.language.v1 LanguageService API.""" - -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.grpc_helpers -import grpc - -from google.cloud.language_v1.gapic import enums -from google.cloud.language_v1.gapic import language_service_client_config -from google.cloud.language_v1.gapic.transports import language_service_grpc_transport -from google.cloud.language_v1.proto import language_service_pb2 -from google.cloud.language_v1.proto import language_service_pb2_grpc - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-language").version - - -class LanguageServiceClient(object): - """ - Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - SERVICE_ADDRESS = "language.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.language.v1.LanguageService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.LanguageServiceGrpcTransport, - Callable[[~.Credentials, type], ~.LanguageServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. 
- channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = language_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=language_service_grpc_transport.LanguageServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = language_service_grpc_transport.LanguageServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. 
- self._inner_api_calls = {} - - # Service calls - def analyze_sentiment( - self, - document, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Analyzes the sentiment of the provided text. - - Example: - >>> from google.cloud import language_v1 - >>> - >>> client = language_v1.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.analyze_sentiment(document) - - Args: - document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1.types.Document` - encoding_type (~google.cloud.language_v1.enums.EncodingType): The encoding type used by the API to calculate sentence offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1.types.AnalyzeSentimentResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "analyze_sentiment" not in self._inner_api_calls: - self._inner_api_calls[ - "analyze_sentiment" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.analyze_sentiment, - default_retry=self._method_configs["AnalyzeSentiment"].retry, - default_timeout=self._method_configs["AnalyzeSentiment"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnalyzeSentimentRequest( - document=document, encoding_type=encoding_type - ) - return self._inner_api_calls["analyze_sentiment"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def analyze_entities( - self, - document, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - - Example: - >>> from google.cloud import language_v1 - >>> - >>> client = language_v1.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.analyze_entities(document) - - Args: - document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1.types.Document` - encoding_type (~google.cloud.language_v1.enums.EncodingType): The encoding type used by the API to calculate offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1.types.AnalyzeEntitiesResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "analyze_entities" not in self._inner_api_calls: - self._inner_api_calls[ - "analyze_entities" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.analyze_entities, - default_retry=self._method_configs["AnalyzeEntities"].retry, - default_timeout=self._method_configs["AnalyzeEntities"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnalyzeEntitiesRequest( - document=document, encoding_type=encoding_type - ) - return self._inner_api_calls["analyze_entities"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def analyze_entity_sentiment( - self, - document, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Finds entities, similar to ``AnalyzeEntities`` in the text and - analyzes sentiment associated with each entity and its mentions. - - Example: - >>> from google.cloud import language_v1 - >>> - >>> client = language_v1.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.analyze_entity_sentiment(document) - - Args: - document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1.types.Document` - encoding_type (~google.cloud.language_v1.enums.EncodingType): The encoding type used by the API to calculate offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1.types.AnalyzeEntitySentimentResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "analyze_entity_sentiment" not in self._inner_api_calls: - self._inner_api_calls[ - "analyze_entity_sentiment" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.analyze_entity_sentiment, - default_retry=self._method_configs["AnalyzeEntitySentiment"].retry, - default_timeout=self._method_configs["AnalyzeEntitySentiment"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnalyzeEntitySentimentRequest( - document=document, encoding_type=encoding_type - ) - return self._inner_api_calls["analyze_entity_sentiment"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def analyze_syntax( - self, - document, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part of speech tags, dependency trees, and other - properties. - - Example: - >>> from google.cloud import language_v1 - >>> - >>> client = language_v1.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.analyze_syntax(document) - - Args: - document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1.types.Document` - encoding_type (~google.cloud.language_v1.enums.EncodingType): The encoding type used by the API to calculate offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1.types.AnalyzeSyntaxResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "analyze_syntax" not in self._inner_api_calls: - self._inner_api_calls[ - "analyze_syntax" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.analyze_syntax, - default_retry=self._method_configs["AnalyzeSyntax"].retry, - default_timeout=self._method_configs["AnalyzeSyntax"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnalyzeSyntaxRequest( - document=document, encoding_type=encoding_type - ) - return self._inner_api_calls["analyze_syntax"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def classify_text( - self, - document, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Classifies a document into categories. - - Example: - >>> from google.cloud import language_v1 - >>> - >>> client = language_v1.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.classify_text(document) - - Args: - document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1.types.Document` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1.types.ClassifyTextResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "classify_text" not in self._inner_api_calls: - self._inner_api_calls[ - "classify_text" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.classify_text, - default_retry=self._method_configs["ClassifyText"].retry, - default_timeout=self._method_configs["ClassifyText"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.ClassifyTextRequest(document=document) - return self._inner_api_calls["classify_text"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def annotate_text( - self, - document, - features, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - A convenience method that provides all the features that analyzeSentiment, - analyzeEntities, and analyzeSyntax provide in one call. - - Example: - >>> from google.cloud import language_v1 - >>> - >>> client = language_v1.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> # TODO: Initialize `features`: - >>> features = {} - >>> - >>> response = client.annotate_text(document, features) - - Args: - document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1.types.Document` - features (Union[dict, ~google.cloud.language_v1.types.Features]): The enabled features. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1.types.Features` - encoding_type (~google.cloud.language_v1.enums.EncodingType): The encoding type used by the API to calculate offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1.types.AnnotateTextResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "annotate_text" not in self._inner_api_calls: - self._inner_api_calls[ - "annotate_text" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.annotate_text, - default_retry=self._method_configs["AnnotateText"].retry, - default_timeout=self._method_configs["AnnotateText"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnnotateTextRequest( - document=document, features=features, encoding_type=encoding_type - ) - return self._inner_api_calls["annotate_text"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/language_v1/gapic/language_service_client_config.py b/google/cloud/language_v1/gapic/language_service_client_config.py deleted file mode 100644 index 061d053e..00000000 --- a/google/cloud/language_v1/gapic/language_service_client_config.py +++ /dev/null @@ -1,53 +0,0 @@ -config = { - "interfaces": { - "google.cloud.language.v1.LanguageService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "AnalyzeSentiment": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "AnalyzeEntities": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "AnalyzeEntitySentiment": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "AnalyzeSyntax": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ClassifyText": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "AnnotateText": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/google/cloud/language_v1/gapic/transports/__init__.py b/google/cloud/language_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/language_v1/gapic/transports/language_service_grpc_transport.py b/google/cloud/language_v1/gapic/transports/language_service_grpc_transport.py deleted file mode 100644 index 5784072c..00000000 --- a/google/cloud/language_v1/gapic/transports/language_service_grpc_transport.py +++ /dev/null @@ -1,197 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers - -from google.cloud.language_v1.proto import language_service_pb2_grpc - - -class LanguageServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.language.v1 LanguageService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-language", - "https://www.googleapis.com/auth/cloud-platform", - ) - - def __init__( - self, channel=None, credentials=None, address="language.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "language_service_stub": language_service_pb2_grpc.LanguageServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, address="language.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def analyze_sentiment(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.analyze_sentiment`. - - Analyzes the sentiment of the provided text. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnalyzeSentiment - - @property - def analyze_entities(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.analyze_entities`. 
- - Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnalyzeEntities - - @property - def analyze_entity_sentiment(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.analyze_entity_sentiment`. - - Finds entities, similar to ``AnalyzeEntities`` in the text and - analyzes sentiment associated with each entity and its mentions. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnalyzeEntitySentiment - - @property - def analyze_syntax(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.analyze_syntax`. - - Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part of speech tags, dependency trees, and other - properties. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnalyzeSyntax - - @property - def classify_text(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.classify_text`. - - Classifies a document into categories. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].ClassifyText - - @property - def annotate_text(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.annotate_text`. - - A convenience method that provides all the features that analyzeSentiment, - analyzeEntities, and analyzeSyntax provide in one call. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnnotateText diff --git a/google/cloud/language_v1/proto/__init__.py b/google/cloud/language_v1/proto/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/language_v1/proto/language_service_pb2.py b/google/cloud/language_v1/proto/language_service_pb2.py deleted file mode 100644 index 675c5ad4..00000000 --- a/google/cloud/language_v1/proto/language_service_pb2.py +++ /dev/null @@ -1,4568 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
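Before the generated protobuf module below, a sketch of how the transport removed above fit together: it owned the gRPC channel and exposed one raw stub per RPC. This assumes the 1.x module layout matching the deleted file path; the transport= hook on LanguageServiceClient is the old GAPIC convention and is an assumption here, not shown in this hunk:

from google.cloud import language_v1
from google.cloud.language_v1.gapic.transports import (
    language_service_grpc_transport,
)

# Let the transport create its own channel (with the unlimited message-size
# options shown above) using application default credentials; channel= and
# credentials= are mutually exclusive.
transport = language_service_grpc_transport.LanguageServiceGrpcTransport(
    address="language.googleapis.com:443",
)

# Each RPC is exposed as a raw stub callable for advanced gRPC use, e.g.:
annotate_text_stub = transport.annotate_text

# Assumed constructor hook: passing the transport back into the high-level
# client (old GAPIC convention, not part of this hunk).
client = language_v1.LanguageServiceClient(transport=transport)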
-# source: google/cloud/language_v1/proto/language_service.proto - -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/language_v1/proto/language_service.proto", - package="google.cloud.language.v1", - syntax="proto3", - serialized_options=b"\n\034com.google.cloud.language.v1B\024LanguageServiceProtoP\001Z@google.golang.org/genproto/googleapis/cloud/language/v1;language", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n5google/cloud/language_v1/proto/language_service.proto\x12\x18google.cloud.language.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto"\xc3\x01\n\x08\x44ocument\x12\x35\n\x04type\x18\x01 \x01(\x0e\x32\'.google.cloud.language.v1.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source"t\n\x08Sentence\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.TextSpan\x12\x36\n\tsentiment\x18\x02 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment"\xff\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x04type\x18\x02 \x01(\x0e\x32%.google.cloud.language.v1.Entity.Type\x12@\n\x08metadata\x18\x03 \x03(\x0b\x32..google.cloud.language.v1.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12\x39\n\x08mentions\x18\x05 \x03(\x0b\x32\'.google.cloud.language.v1.EntityMention\x12\x36\n\tsentiment\x18\x06 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb9\x01\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\x12\x10\n\x0cPHONE_NUMBER\x10\t\x12\x0b\n\x07\x41\x44\x44RESS\x10\n\x12\x08\n\x04\x44\x41TE\x10\x0b\x12\n\n\x06NUMBER\x10\x0c\x12\t\n\x05PRICE\x10\r"\xcb\x01\n\x05Token\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.TextSpan\x12>\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32&.google.cloud.language.v1.PartOfSpeech\x12\x41\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02"\xa3\x10\n\x0cPartOfSpeech\x12\x37\n\x03tag\x18\x01 \x01(\x0e\x32*.google.cloud.language.v1.PartOfSpeech.Tag\x12=\n\x06\x61spect\x18\x02 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Aspect\x12\x39\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Case\x12\x39\n\x04\x66orm\x18\x04 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Form\x12=\n\x06gender\x18\x05 
\x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Gender\x12\x39\n\x04mood\x18\x06 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Mood\x12=\n\x06number\x18\x07 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Number\x12=\n\x06person\x18\x08 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Person\x12=\n\x06proper\x18\t \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Proper\x12G\n\x0breciprocity\x18\n \x01(\x0e\x32\x32.google.cloud.language.v1.PartOfSpeech.Reciprocity\x12;\n\x05tense\x18\x0b \x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Tense\x12;\n\x05voice\x18\x0c \x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Voice"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03"\x95\x08\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12=\n\x05label\x18\x02 
\x01(\x0e\x32..google.cloud.language.v1.DependencyEdge.Label"\xa9\x07\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\x12\x07\n\x03\x41SP\x10M\x12\x08\n\x04GMOD\x10N\x12\x08\n\x04GOBJ\x10O\x12\n\n\x06INFMOD\x10P\x12\x07\n\x03MES\x10Q\x12\t\n\x05NCOMP\x10R"\xe7\x01\n\rEntityMention\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.TextSpan\x12:\n\x04type\x18\x02 \x01(\x0e\x32,.google.cloud.language.v1.EntityMention.Type\x12\x36\n\tsentiment\x18\x03 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05":\n\x16\x43lassificationCategory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"\x93\x01\n\x17\x41nalyzeSentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.DocumentB\x03\xe0\x41\x02\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType"\xa4\x01\n\x18\x41nalyzeSentimentResponse\x12?\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12\x35\n\tsentences\x18\x03 \x03(\x0b\x32".google.cloud.language.v1.Sentence"\x99\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.DocumentB\x03\xe0\x41\x02\x12=\n\rencoding_type\x18\x02 
\x01(\x0e\x32&.google.cloud.language.v1.EncodingType"f\n\x1e\x41nalyzeEntitySentimentResponse\x12\x32\n\x08\x65ntities\x18\x01 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x92\x01\n\x16\x41nalyzeEntitiesRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.DocumentB\x03\xe0\x41\x02\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType"_\n\x17\x41nalyzeEntitiesResponse\x12\x32\n\x08\x65ntities\x18\x01 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x90\x01\n\x14\x41nalyzeSyntaxRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.DocumentB\x03\xe0\x41\x02\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType"\x91\x01\n\x15\x41nalyzeSyntaxResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x10\n\x08language\x18\x03 \x01(\t"P\n\x13\x43lassifyTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.DocumentB\x03\xe0\x41\x02"\\\n\x14\x43lassifyTextResponse\x12\x44\n\ncategories\x18\x01 \x03(\x0b\x32\x30.google.cloud.language.v1.ClassificationCategory"\xfa\x02\n\x13\x41nnotateTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.cloud.language.v1.DocumentB\x03\xe0\x41\x02\x12M\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x36.google.cloud.language.v1.AnnotateTextRequest.FeaturesB\x03\xe0\x41\x02\x12=\n\rencoding_type\x18\x03 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\x1a\x99\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\x12\x15\n\rclassify_text\x18\x06 \x01(\x08"\xcb\x02\n\x14\x41nnotateTextResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x32\n\x08\x65ntities\x18\x03 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12?\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t\x12\x44\n\ncategories\x18\x06 
\x03(\x0b\x32\x30.google.cloud.language.v1.ClassificationCategory*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\xb0\n\n\x0fLanguageService\x12\xc8\x01\n\x10\x41nalyzeSentiment\x12\x31.google.cloud.language.v1.AnalyzeSentimentRequest\x1a\x32.google.cloud.language.v1.AnalyzeSentimentResponse"M\x82\xd3\xe4\x93\x02#"\x1e/v1/documents:analyzeSentiment:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xc4\x01\n\x0f\x41nalyzeEntities\x12\x30.google.cloud.language.v1.AnalyzeEntitiesRequest\x1a\x31.google.cloud.language.v1.AnalyzeEntitiesResponse"L\x82\xd3\xe4\x93\x02""\x1d/v1/documents:analyzeEntities:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xe0\x01\n\x16\x41nalyzeEntitySentiment\x12\x37.google.cloud.language.v1.AnalyzeEntitySentimentRequest\x1a\x38.google.cloud.language.v1.AnalyzeEntitySentimentResponse"S\x82\xd3\xe4\x93\x02)"$/v1/documents:analyzeEntitySentiment:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xbc\x01\n\rAnalyzeSyntax\x12..google.cloud.language.v1.AnalyzeSyntaxRequest\x1a/.google.cloud.language.v1.AnalyzeSyntaxResponse"J\x82\xd3\xe4\x93\x02 "\x1b/v1/documents:analyzeSyntax:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\x9f\x01\n\x0c\x43lassifyText\x12-.google.cloud.language.v1.ClassifyTextRequest\x1a..google.cloud.language.v1.ClassifyTextResponse"0\x82\xd3\xe4\x93\x02\x1f"\x1a/v1/documents:classifyText:\x01*\xda\x41\x08\x64ocument\x12\xca\x01\n\x0c\x41nnotateText\x12-.google.cloud.language.v1.AnnotateTextRequest\x1a..google.cloud.language.v1.AnnotateTextResponse"[\x82\xd3\xe4\x93\x02\x1f"\x1a/v1/documents:annotateText:\x01*\xda\x41\x1f\x64ocument,features,encoding_type\xda\x41\x11\x64ocument,features\x1az\xca\x41\x17language.googleapis.com\xd2\x41]https://www.googleapis.com/auth/cloud-language,https://www.googleapis.com/auth/cloud-platformBx\n\x1c\x63om.google.cloud.language.v1B\x14LanguageServiceProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/language/v1;languageb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - ], -) - -_ENCODINGTYPE = _descriptor.EnumDescriptor( - name="EncodingType", - full_name="google.cloud.language.v1.EncodingType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="NONE", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="UTF8", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="UTF16", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="UTF32", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=6742, - serialized_end=6798, -) -_sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) - -EncodingType = enum_type_wrapper.EnumTypeWrapper(_ENCODINGTYPE) -NONE = 0 -UTF8 = 1 -UTF16 = 2 -UTF32 = 3 - - -_DOCUMENT_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.language.v1.Document.Type", - 
filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PLAIN_TEXT", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="HTML", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=303, - serialized_end=357, -) -_sym_db.RegisterEnumDescriptor(_DOCUMENT_TYPE) - -_ENTITY_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.language.v1.Entity.Type", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PERSON", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LOCATION", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ORGANIZATION", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="EVENT", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="WORK_OF_ART", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONSUMER_GOOD", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OTHER", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PHONE_NUMBER", - index=8, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADDRESS", - index=9, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DATE", - index=10, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUMBER", - index=11, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRICE", - index=12, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=814, - serialized_end=999, -) -_sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) - -_PARTOFSPEECH_TAG = _descriptor.EnumDescriptor( - name="Tag", - full_name="google.cloud.language.v1.PartOfSpeech.Tag", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - 
_descriptor.EnumValueDescriptor( - name="UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADJ", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADP", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADV", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONJ", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DET", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOUN", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUM", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRON", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRT", - index=9, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PUNCT", - index=10, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VERB", - index=11, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="X", - index=12, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AFFIX", - index=13, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2016, - serialized_end=2157, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) - -_PARTOFSPEECH_ASPECT = _descriptor.EnumDescriptor( - name="Aspect", - full_name="google.cloud.language.v1.PartOfSpeech.Aspect", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="ASPECT_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PERFECTIVE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPERFECTIVE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PROGRESSIVE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2159, - serialized_end=2238, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) - 
-_PARTOFSPEECH_CASE = _descriptor.EnumDescriptor( - name="Case", - full_name="google.cloud.language.v1.PartOfSpeech.Case", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="CASE_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ACCUSATIVE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADVERBIAL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COMPLEMENTIVE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DATIVE", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GENITIVE", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INSTRUMENTAL", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LOCATIVE", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOMINATIVE", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OBLIQUE", - index=9, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PARTITIVE", - index=10, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PREPOSITIONAL", - index=11, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REFLEXIVE_CASE", - index=12, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RELATIVE_CASE", - index=13, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VOCATIVE", - index=14, - number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2241, - serialized_end=2489, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) - -_PARTOFSPEECH_FORM = _descriptor.EnumDescriptor( - name="Form", - full_name="google.cloud.language.v1.PartOfSpeech.Form", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="FORM_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADNOMIAL", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - 
name="AUXILIARY", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COMPLEMENTIZER", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FINAL_ENDING", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GERUND", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REALIS", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IRREALIS", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SHORT", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LONG", - index=9, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ORDER", - index=10, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SPECIFIC", - index=11, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2492, - serialized_end=2667, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) - -_PARTOFSPEECH_GENDER = _descriptor.EnumDescriptor( - name="Gender", - full_name="google.cloud.language.v1.PartOfSpeech.Gender", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="GENDER_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FEMININE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MASCULINE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NEUTER", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2669, - serialized_end=2738, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) - -_PARTOFSPEECH_MOOD = _descriptor.EnumDescriptor( - name="Mood", - full_name="google.cloud.language.v1.PartOfSpeech.Mood", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="MOOD_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONDITIONAL_MOOD", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPERATIVE", - index=2, - number=2, - serialized_options=None, 
- type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INDICATIVE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INTERROGATIVE", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="JUSSIVE", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SUBJUNCTIVE", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2740, - serialized_end=2867, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) - -_PARTOFSPEECH_NUMBER = _descriptor.EnumDescriptor( - name="Number", - full_name="google.cloud.language.v1.PartOfSpeech.Number", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="NUMBER_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SINGULAR", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PLURAL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DUAL", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2869, - serialized_end=2933, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) - -_PARTOFSPEECH_PERSON = _descriptor.EnumDescriptor( - name="Person", - full_name="google.cloud.language.v1.PartOfSpeech.Person", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="PERSON_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FIRST", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SECOND", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="THIRD", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REFLEXIVE_PERSON", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2935, - serialized_end=3019, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) - -_PARTOFSPEECH_PROPER = _descriptor.EnumDescriptor( - name="Proper", - full_name="google.cloud.language.v1.PartOfSpeech.Proper", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="PROPER_UNKNOWN", - index=0, - number=0, - serialized_options=None, - 
type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PROPER", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOT_PROPER", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3021, - serialized_end=3077, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) - -_PARTOFSPEECH_RECIPROCITY = _descriptor.EnumDescriptor( - name="Reciprocity", - full_name="google.cloud.language.v1.PartOfSpeech.Reciprocity", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="RECIPROCITY_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RECIPROCAL", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NON_RECIPROCAL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3079, - serialized_end=3153, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) - -_PARTOFSPEECH_TENSE = _descriptor.EnumDescriptor( - name="Tense", - full_name="google.cloud.language.v1.PartOfSpeech.Tense", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TENSE_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONDITIONAL_TENSE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FUTURE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PAST", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRESENT", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPERFECT", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PLUPERFECT", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3155, - serialized_end=3270, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) - -_PARTOFSPEECH_VOICE = _descriptor.EnumDescriptor( - name="Voice", - full_name="google.cloud.language.v1.PartOfSpeech.Voice", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="VOICE_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ACTIVE", - index=1, - number=1, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CAUSATIVE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PASSIVE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3272, - serialized_end=3338, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) - -_DEPENDENCYEDGE_LABEL = _descriptor.EnumDescriptor( - name="Label", - full_name="google.cloud.language.v1.DependencyEdge.Label", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ABBREV", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ACOMP", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADVCL", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADVMOD", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AMOD", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="APPOS", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTR", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUX", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUXPASS", - index=9, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CC", - index=10, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CCOMP", - index=11, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONJ", - index=12, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CSUBJ", - index=13, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CSUBJPASS", - index=14, - number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DEP", - index=15, - number=15, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DET", - index=16, - 
number=16, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DISCOURSE", - index=17, - number=17, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DOBJ", - index=18, - number=18, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="EXPL", - index=19, - number=19, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GOESWITH", - index=20, - number=20, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IOBJ", - index=21, - number=21, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MARK", - index=22, - number=22, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MWE", - index=23, - number=23, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MWV", - index=24, - number=24, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NEG", - index=25, - number=25, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NN", - index=26, - number=26, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NPADVMOD", - index=27, - number=27, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NSUBJ", - index=28, - number=28, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NSUBJPASS", - index=29, - number=29, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUM", - index=30, - number=30, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUMBER", - index=31, - number=31, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="P", - index=32, - number=32, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PARATAXIS", - index=33, - number=33, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PARTMOD", - index=34, - number=34, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PCOMP", - index=35, - number=35, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POBJ", - index=36, - number=36, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POSS", - index=37, - number=37, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POSTNEG", - index=38, - number=38, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRECOMP", - index=39, - number=39, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRECONJ", - index=40, - number=40, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PREDET", - index=41, - number=41, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PREF", - index=42, - number=42, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PREP", - index=43, - number=43, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRONL", - index=44, - number=44, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRT", - index=45, - number=45, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PS", - index=46, - number=46, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="QUANTMOD", - index=47, - number=47, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RCMOD", - index=48, - number=48, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RCMODREL", - index=49, - number=49, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RDROP", - index=50, - number=50, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REF", - index=51, - number=51, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REMNANT", - index=52, - number=52, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REPARANDUM", - index=53, - number=53, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ROOT", - index=54, - number=54, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SNUM", - index=55, - number=55, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SUFF", - index=56, - number=56, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TMOD", - index=57, - number=57, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TOPIC", - index=58, - number=58, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VMOD", - index=59, - number=59, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VOCATIVE", - index=60, - number=60, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="XCOMP", - index=61, - number=61, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SUFFIX", - index=62, - number=62, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TITLE", - index=63, - number=63, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADVPHMOD", - index=64, - number=64, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUXCAUS", - index=65, - number=65, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUXVV", - index=66, - number=66, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DTMOD", - index=67, - number=67, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FOREIGN", - index=68, - number=68, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="KW", - index=69, - number=69, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LIST", - index=70, - number=70, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOMC", - index=71, - number=71, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOMCSUBJ", - index=72, - number=72, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOMCSUBJPASS", - index=73, - number=73, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUMC", - index=74, - number=74, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COP", - index=75, - number=75, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DISLOCATED", - index=76, - number=76, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ASP", - index=77, - number=77, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GMOD", - index=78, - number=78, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GOBJ", - index=79, - number=79, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INFMOD", - index=80, - number=80, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MES", - index=81, - number=81, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NCOMP", - index=82, - number=82, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3449, - serialized_end=4386, -) -_sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) - -_ENTITYMENTION_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.language.v1.EntityMention.Type", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PROPER", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COMMON", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4572, - serialized_end=4620, -) -_sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) - - -_DOCUMENT = _descriptor.Descriptor( - name="Document", - full_name="google.cloud.language.v1.Document", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.language.v1.Document.type", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="content", - full_name="google.cloud.language.v1.Document.content", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="gcs_content_uri", - full_name="google.cloud.language.v1.Document.gcs_content_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1.Document.language", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DOCUMENT_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="source", - full_name="google.cloud.language.v1.Document.source", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ) - ], - serialized_start=172, - serialized_end=367, -) - - -_SENTENCE = _descriptor.Descriptor( - name="Sentence", - full_name="google.cloud.language.v1.Sentence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text", - full_name="google.cloud.language.v1.Sentence.text", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentiment", - full_name="google.cloud.language.v1.Sentence.sentiment", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=369, - serialized_end=485, -) - - -_ENTITY_METADATAENTRY = _descriptor.Descriptor( - name="MetadataEntry", - full_name="google.cloud.language.v1.Entity.MetadataEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.language.v1.Entity.MetadataEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.language.v1.Entity.MetadataEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=764, - serialized_end=811, -) - -_ENTITY = _descriptor.Descriptor( - name="Entity", - full_name="google.cloud.language.v1.Entity", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.language.v1.Entity.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.language.v1.Entity.type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.cloud.language.v1.Entity.metadata", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="salience", - full_name="google.cloud.language.v1.Entity.salience", - index=3, - number=4, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="mentions", - full_name="google.cloud.language.v1.Entity.mentions", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentiment", - full_name="google.cloud.language.v1.Entity.sentiment", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_ENTITY_METADATAENTRY], - enum_types=[_ENTITY_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=488, - serialized_end=999, -) - - -_TOKEN = _descriptor.Descriptor( - name="Token", - full_name="google.cloud.language.v1.Token", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text", - full_name="google.cloud.language.v1.Token.text", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="part_of_speech", - full_name="google.cloud.language.v1.Token.part_of_speech", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dependency_edge", - full_name="google.cloud.language.v1.Token.dependency_edge", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="lemma", - full_name="google.cloud.language.v1.Token.lemma", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1002, - serialized_end=1205, -) - - -_SENTIMENT = _descriptor.Descriptor( - name="Sentiment", - full_name="google.cloud.language.v1.Sentiment", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="magnitude", - full_name="google.cloud.language.v1.Sentiment.magnitude", - index=0, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="score", - full_name="google.cloud.language.v1.Sentiment.score", - index=1, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1207, - serialized_end=1252, -) - - -_PARTOFSPEECH = _descriptor.Descriptor( - name="PartOfSpeech", - full_name="google.cloud.language.v1.PartOfSpeech", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="tag", - full_name="google.cloud.language.v1.PartOfSpeech.tag", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="aspect", - full_name="google.cloud.language.v1.PartOfSpeech.aspect", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="case", - full_name="google.cloud.language.v1.PartOfSpeech.case", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="form", - full_name="google.cloud.language.v1.PartOfSpeech.form", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="gender", - full_name="google.cloud.language.v1.PartOfSpeech.gender", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="mood", - full_name="google.cloud.language.v1.PartOfSpeech.mood", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="number", - full_name="google.cloud.language.v1.PartOfSpeech.number", - index=6, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="person", - full_name="google.cloud.language.v1.PartOfSpeech.person", - index=7, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="proper", - full_name="google.cloud.language.v1.PartOfSpeech.proper", - index=8, - number=9, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="reciprocity", - full_name="google.cloud.language.v1.PartOfSpeech.reciprocity", - index=9, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="tense", - full_name="google.cloud.language.v1.PartOfSpeech.tense", - index=10, - number=11, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - 
default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="voice", - full_name="google.cloud.language.v1.PartOfSpeech.voice", - index=11, - number=12, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[ - _PARTOFSPEECH_TAG, - _PARTOFSPEECH_ASPECT, - _PARTOFSPEECH_CASE, - _PARTOFSPEECH_FORM, - _PARTOFSPEECH_GENDER, - _PARTOFSPEECH_MOOD, - _PARTOFSPEECH_NUMBER, - _PARTOFSPEECH_PERSON, - _PARTOFSPEECH_PROPER, - _PARTOFSPEECH_RECIPROCITY, - _PARTOFSPEECH_TENSE, - _PARTOFSPEECH_VOICE, - ], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1255, - serialized_end=3338, -) - - -_DEPENDENCYEDGE = _descriptor.Descriptor( - name="DependencyEdge", - full_name="google.cloud.language.v1.DependencyEdge", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="head_token_index", - full_name="google.cloud.language.v1.DependencyEdge.head_token_index", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="label", - full_name="google.cloud.language.v1.DependencyEdge.label", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DEPENDENCYEDGE_LABEL], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3341, - serialized_end=4386, -) - - -_ENTITYMENTION = _descriptor.Descriptor( - name="EntityMention", - full_name="google.cloud.language.v1.EntityMention", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text", - full_name="google.cloud.language.v1.EntityMention.text", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.language.v1.EntityMention.type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentiment", - full_name="google.cloud.language.v1.EntityMention.sentiment", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_ENTITYMENTION_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4389, - serialized_end=4620, -) - - -_TEXTSPAN = _descriptor.Descriptor( - name="TextSpan", - full_name="google.cloud.language.v1.TextSpan", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="content", - full_name="google.cloud.language.v1.TextSpan.content", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="begin_offset", - full_name="google.cloud.language.v1.TextSpan.begin_offset", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4622, - serialized_end=4671, -) - - -_CLASSIFICATIONCATEGORY = _descriptor.Descriptor( - name="ClassificationCategory", - full_name="google.cloud.language.v1.ClassificationCategory", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.language.v1.ClassificationCategory.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="confidence", - full_name="google.cloud.language.v1.ClassificationCategory.confidence", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4673, - serialized_end=4731, -) - - -_ANALYZESENTIMENTREQUEST = _descriptor.Descriptor( - name="AnalyzeSentimentRequest", - full_name="google.cloud.language.v1.AnalyzeSentimentRequest", - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1.AnalyzeSentimentRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1.AnalyzeSentimentRequest.encoding_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4734, - serialized_end=4881, -) - - -_ANALYZESENTIMENTRESPONSE = _descriptor.Descriptor( - name="AnalyzeSentimentResponse", - full_name="google.cloud.language.v1.AnalyzeSentimentResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document_sentiment", - full_name="google.cloud.language.v1.AnalyzeSentimentResponse.document_sentiment", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1.AnalyzeSentimentResponse.language", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentences", - full_name="google.cloud.language.v1.AnalyzeSentimentResponse.sentences", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4884, - serialized_end=5048, -) - - -_ANALYZEENTITYSENTIMENTREQUEST = _descriptor.Descriptor( - name="AnalyzeEntitySentimentRequest", - full_name="google.cloud.language.v1.AnalyzeEntitySentimentRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1.AnalyzeEntitySentimentRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1.AnalyzeEntitySentimentRequest.encoding_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5051, - serialized_end=5204, -) - - -_ANALYZEENTITYSENTIMENTRESPONSE = _descriptor.Descriptor( - name="AnalyzeEntitySentimentResponse", - full_name="google.cloud.language.v1.AnalyzeEntitySentimentResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="entities", - full_name="google.cloud.language.v1.AnalyzeEntitySentimentResponse.entities", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1.AnalyzeEntitySentimentResponse.language", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5206, - serialized_end=5308, -) - - -_ANALYZEENTITIESREQUEST = _descriptor.Descriptor( - name="AnalyzeEntitiesRequest", - full_name="google.cloud.language.v1.AnalyzeEntitiesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1.AnalyzeEntitiesRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1.AnalyzeEntitiesRequest.encoding_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], 
- serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5311, - serialized_end=5457, -) - - -_ANALYZEENTITIESRESPONSE = _descriptor.Descriptor( - name="AnalyzeEntitiesResponse", - full_name="google.cloud.language.v1.AnalyzeEntitiesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="entities", - full_name="google.cloud.language.v1.AnalyzeEntitiesResponse.entities", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1.AnalyzeEntitiesResponse.language", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5459, - serialized_end=5554, -) - - -_ANALYZESYNTAXREQUEST = _descriptor.Descriptor( - name="AnalyzeSyntaxRequest", - full_name="google.cloud.language.v1.AnalyzeSyntaxRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1.AnalyzeSyntaxRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1.AnalyzeSyntaxRequest.encoding_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5557, - serialized_end=5701, -) - - -_ANALYZESYNTAXRESPONSE = _descriptor.Descriptor( - name="AnalyzeSyntaxResponse", - full_name="google.cloud.language.v1.AnalyzeSyntaxResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="sentences", - full_name="google.cloud.language.v1.AnalyzeSyntaxResponse.sentences", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="tokens", - full_name="google.cloud.language.v1.AnalyzeSyntaxResponse.tokens", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1.AnalyzeSyntaxResponse.language", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5704, - serialized_end=5849, -) - - -_CLASSIFYTEXTREQUEST = _descriptor.Descriptor( - name="ClassifyTextRequest", - full_name="google.cloud.language.v1.ClassifyTextRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1.ClassifyTextRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5851, - serialized_end=5931, -) - - -_CLASSIFYTEXTRESPONSE = _descriptor.Descriptor( - name="ClassifyTextResponse", - full_name="google.cloud.language.v1.ClassifyTextResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="categories", - full_name="google.cloud.language.v1.ClassifyTextResponse.categories", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5933, - serialized_end=6025, -) - - -_ANNOTATETEXTREQUEST_FEATURES = _descriptor.Descriptor( - name="Features", - full_name="google.cloud.language.v1.AnnotateTextRequest.Features", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="extract_syntax", - full_name="google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="extract_entities", - full_name="google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="extract_document_sentiment", - full_name="google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="extract_entity_sentiment", - full_name="google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="classify_text", - full_name="google.cloud.language.v1.AnnotateTextRequest.Features.classify_text", - index=4, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6253, - serialized_end=6406, -) - -_ANNOTATETEXTREQUEST = _descriptor.Descriptor( - name="AnnotateTextRequest", - full_name="google.cloud.language.v1.AnnotateTextRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1.AnnotateTextRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="features", - full_name="google.cloud.language.v1.AnnotateTextRequest.features", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1.AnnotateTextRequest.encoding_type", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, 
- has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_ANNOTATETEXTREQUEST_FEATURES], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6028, - serialized_end=6406, -) - - -_ANNOTATETEXTRESPONSE = _descriptor.Descriptor( - name="AnnotateTextResponse", - full_name="google.cloud.language.v1.AnnotateTextResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="sentences", - full_name="google.cloud.language.v1.AnnotateTextResponse.sentences", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="tokens", - full_name="google.cloud.language.v1.AnnotateTextResponse.tokens", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="entities", - full_name="google.cloud.language.v1.AnnotateTextResponse.entities", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="document_sentiment", - full_name="google.cloud.language.v1.AnnotateTextResponse.document_sentiment", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1.AnnotateTextResponse.language", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="categories", - full_name="google.cloud.language.v1.AnnotateTextResponse.categories", - index=5, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - 
oneofs=[], - serialized_start=6409, - serialized_end=6740, -) - -_DOCUMENT.fields_by_name["type"].enum_type = _DOCUMENT_TYPE -_DOCUMENT_TYPE.containing_type = _DOCUMENT -_DOCUMENT.oneofs_by_name["source"].fields.append(_DOCUMENT.fields_by_name["content"]) -_DOCUMENT.fields_by_name["content"].containing_oneof = _DOCUMENT.oneofs_by_name[ - "source" -] -_DOCUMENT.oneofs_by_name["source"].fields.append( - _DOCUMENT.fields_by_name["gcs_content_uri"] -) -_DOCUMENT.fields_by_name["gcs_content_uri"].containing_oneof = _DOCUMENT.oneofs_by_name[ - "source" -] -_SENTENCE.fields_by_name["text"].message_type = _TEXTSPAN -_SENTENCE.fields_by_name["sentiment"].message_type = _SENTIMENT -_ENTITY_METADATAENTRY.containing_type = _ENTITY -_ENTITY.fields_by_name["type"].enum_type = _ENTITY_TYPE -_ENTITY.fields_by_name["metadata"].message_type = _ENTITY_METADATAENTRY -_ENTITY.fields_by_name["mentions"].message_type = _ENTITYMENTION -_ENTITY.fields_by_name["sentiment"].message_type = _SENTIMENT -_ENTITY_TYPE.containing_type = _ENTITY -_TOKEN.fields_by_name["text"].message_type = _TEXTSPAN -_TOKEN.fields_by_name["part_of_speech"].message_type = _PARTOFSPEECH -_TOKEN.fields_by_name["dependency_edge"].message_type = _DEPENDENCYEDGE -_PARTOFSPEECH.fields_by_name["tag"].enum_type = _PARTOFSPEECH_TAG -_PARTOFSPEECH.fields_by_name["aspect"].enum_type = _PARTOFSPEECH_ASPECT -_PARTOFSPEECH.fields_by_name["case"].enum_type = _PARTOFSPEECH_CASE -_PARTOFSPEECH.fields_by_name["form"].enum_type = _PARTOFSPEECH_FORM -_PARTOFSPEECH.fields_by_name["gender"].enum_type = _PARTOFSPEECH_GENDER -_PARTOFSPEECH.fields_by_name["mood"].enum_type = _PARTOFSPEECH_MOOD -_PARTOFSPEECH.fields_by_name["number"].enum_type = _PARTOFSPEECH_NUMBER -_PARTOFSPEECH.fields_by_name["person"].enum_type = _PARTOFSPEECH_PERSON -_PARTOFSPEECH.fields_by_name["proper"].enum_type = _PARTOFSPEECH_PROPER -_PARTOFSPEECH.fields_by_name["reciprocity"].enum_type = _PARTOFSPEECH_RECIPROCITY -_PARTOFSPEECH.fields_by_name["tense"].enum_type = _PARTOFSPEECH_TENSE -_PARTOFSPEECH.fields_by_name["voice"].enum_type = _PARTOFSPEECH_VOICE -_PARTOFSPEECH_TAG.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_ASPECT.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_CASE.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_FORM.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_GENDER.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_MOOD.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_NUMBER.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_PERSON.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_PROPER.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_RECIPROCITY.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_TENSE.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_VOICE.containing_type = _PARTOFSPEECH -_DEPENDENCYEDGE.fields_by_name["label"].enum_type = _DEPENDENCYEDGE_LABEL -_DEPENDENCYEDGE_LABEL.containing_type = _DEPENDENCYEDGE -_ENTITYMENTION.fields_by_name["text"].message_type = _TEXTSPAN -_ENTITYMENTION.fields_by_name["type"].enum_type = _ENTITYMENTION_TYPE -_ENTITYMENTION.fields_by_name["sentiment"].message_type = _SENTIMENT -_ENTITYMENTION_TYPE.containing_type = _ENTITYMENTION -_ANALYZESENTIMENTREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANALYZESENTIMENTREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANALYZESENTIMENTRESPONSE.fields_by_name["document_sentiment"].message_type = _SENTIMENT -_ANALYZESENTIMENTRESPONSE.fields_by_name["sentences"].message_type = _SENTENCE -_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name["document"].message_type = _DOCUMENT 
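The wiring above attaches ``content`` and ``gcs_content_uri`` to the Document ``source`` oneof, so a request carries either inline text or a Cloud Storage URI, never both. As a rough usage sketch (assuming the proto-plus ``google.cloud.language_v1`` surface that supersedes this hand-registered descriptor module; spellings such as ``type_`` follow that convention and are not shown in this diff):

# Sketch only: assumes the high-level proto-plus client, not part of this diff.
from google.cloud import language_v1

# ``content`` and ``gcs_content_uri`` share the ``source`` oneof, so a Document
# holds either inline text or a gs:// URI, not both.
inline_doc = language_v1.Document(
    content="The quick brown fox jumped over the lazy dog.",
    type_=language_v1.Document.Type.PLAIN_TEXT,
    language="en",
)
gcs_doc = language_v1.Document(
    gcs_content_uri="gs://bucket_name/object_name",
    type_=language_v1.Document.Type.PLAIN_TEXT,
)

client = language_v1.LanguageServiceClient()
response = client.analyze_sentiment(
    request={
        "document": inline_doc,
        "encoding_type": language_v1.EncodingType.UTF8,
    }
)
print(response.document_sentiment.score, response.document_sentiment.magnitude)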
-_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANALYZEENTITYSENTIMENTRESPONSE.fields_by_name["entities"].message_type = _ENTITY -_ANALYZEENTITIESREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANALYZEENTITIESREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANALYZEENTITIESRESPONSE.fields_by_name["entities"].message_type = _ENTITY -_ANALYZESYNTAXREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANALYZESYNTAXREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANALYZESYNTAXRESPONSE.fields_by_name["sentences"].message_type = _SENTENCE -_ANALYZESYNTAXRESPONSE.fields_by_name["tokens"].message_type = _TOKEN -_CLASSIFYTEXTREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_CLASSIFYTEXTRESPONSE.fields_by_name[ - "categories" -].message_type = _CLASSIFICATIONCATEGORY -_ANNOTATETEXTREQUEST_FEATURES.containing_type = _ANNOTATETEXTREQUEST -_ANNOTATETEXTREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANNOTATETEXTREQUEST.fields_by_name[ - "features" -].message_type = _ANNOTATETEXTREQUEST_FEATURES -_ANNOTATETEXTREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANNOTATETEXTRESPONSE.fields_by_name["sentences"].message_type = _SENTENCE -_ANNOTATETEXTRESPONSE.fields_by_name["tokens"].message_type = _TOKEN -_ANNOTATETEXTRESPONSE.fields_by_name["entities"].message_type = _ENTITY -_ANNOTATETEXTRESPONSE.fields_by_name["document_sentiment"].message_type = _SENTIMENT -_ANNOTATETEXTRESPONSE.fields_by_name[ - "categories" -].message_type = _CLASSIFICATIONCATEGORY -DESCRIPTOR.message_types_by_name["Document"] = _DOCUMENT -DESCRIPTOR.message_types_by_name["Sentence"] = _SENTENCE -DESCRIPTOR.message_types_by_name["Entity"] = _ENTITY -DESCRIPTOR.message_types_by_name["Token"] = _TOKEN -DESCRIPTOR.message_types_by_name["Sentiment"] = _SENTIMENT -DESCRIPTOR.message_types_by_name["PartOfSpeech"] = _PARTOFSPEECH -DESCRIPTOR.message_types_by_name["DependencyEdge"] = _DEPENDENCYEDGE -DESCRIPTOR.message_types_by_name["EntityMention"] = _ENTITYMENTION -DESCRIPTOR.message_types_by_name["TextSpan"] = _TEXTSPAN -DESCRIPTOR.message_types_by_name["ClassificationCategory"] = _CLASSIFICATIONCATEGORY -DESCRIPTOR.message_types_by_name["AnalyzeSentimentRequest"] = _ANALYZESENTIMENTREQUEST -DESCRIPTOR.message_types_by_name["AnalyzeSentimentResponse"] = _ANALYZESENTIMENTRESPONSE -DESCRIPTOR.message_types_by_name[ - "AnalyzeEntitySentimentRequest" -] = _ANALYZEENTITYSENTIMENTREQUEST -DESCRIPTOR.message_types_by_name[ - "AnalyzeEntitySentimentResponse" -] = _ANALYZEENTITYSENTIMENTRESPONSE -DESCRIPTOR.message_types_by_name["AnalyzeEntitiesRequest"] = _ANALYZEENTITIESREQUEST -DESCRIPTOR.message_types_by_name["AnalyzeEntitiesResponse"] = _ANALYZEENTITIESRESPONSE -DESCRIPTOR.message_types_by_name["AnalyzeSyntaxRequest"] = _ANALYZESYNTAXREQUEST -DESCRIPTOR.message_types_by_name["AnalyzeSyntaxResponse"] = _ANALYZESYNTAXRESPONSE -DESCRIPTOR.message_types_by_name["ClassifyTextRequest"] = _CLASSIFYTEXTREQUEST -DESCRIPTOR.message_types_by_name["ClassifyTextResponse"] = _CLASSIFYTEXTRESPONSE -DESCRIPTOR.message_types_by_name["AnnotateTextRequest"] = _ANNOTATETEXTREQUEST -DESCRIPTOR.message_types_by_name["AnnotateTextResponse"] = _ANNOTATETEXTRESPONSE -DESCRIPTOR.enum_types_by_name["EncodingType"] = _ENCODINGTYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Document = _reflection.GeneratedProtocolMessageType( - "Document", - (_message.Message,), - { - "DESCRIPTOR": _DOCUMENT, - "__module__": 
"google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """################################################################ # - Represents the input to API methods. - - Attributes: - type: - Required. If the type is not set or is ``TYPE_UNSPECIFIED``, - returns an ``INVALID_ARGUMENT`` error. - source: - The source of the document: a string containing the content or - a Google Cloud Storage URI. - content: - The content of the input in string format. Cloud audit logging - exempt since it is based on user data. - gcs_content_uri: - The Google Cloud Storage URI where the file content is - located. This URI must be of the form: - gs://bucket_name/object_name. For more details, see - https://cloud.google.com/storage/docs/reference-uris. NOTE: - Cloud Storage object versioning is not supported. - language: - The language of the document (if not specified, the language - is automatically detected). Both ISO and BCP-47 language codes - are accepted. `Language Support - `__ - lists currently supported languages for each API method. If - the language (either specified by the caller or automatically - detected) is not supported by the called API method, an - ``INVALID_ARGUMENT`` error is returned. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Document) - }, -) -_sym_db.RegisterMessage(Document) - -Sentence = _reflection.GeneratedProtocolMessageType( - "Sentence", - (_message.Message,), - { - "DESCRIPTOR": _SENTENCE, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents a sentence in the input document. - - Attributes: - text: - The sentence text. - sentiment: - For calls to [AnalyzeSentiment][] or if [AnnotateTextRequest.F - eatures.extract_document_sentiment][google.cloud.language.v1.A - nnotateTextRequest.Features.extract_document_sentiment] is set - to true, this field will contain the sentiment for the - sentence. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Sentence) - }, -) -_sym_db.RegisterMessage(Sentence) - -Entity = _reflection.GeneratedProtocolMessageType( - "Entity", - (_message.Message,), - { - "MetadataEntry": _reflection.GeneratedProtocolMessageType( - "MetadataEntry", - (_message.Message,), - { - "DESCRIPTOR": _ENTITY_METADATAENTRY, - "__module__": "google.cloud.language_v1.proto.language_service_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Entity.MetadataEntry) - }, - ), - "DESCRIPTOR": _ENTITY, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents a phrase in the text that is a known entity, such as a - person, an organization, or location. The API associates information, - such as salience and mentions, with entities. - - Attributes: - name: - The representative name for the entity. - type: - The entity type. - metadata: - Metadata associated with the entity. For most entity types, - the metadata is a Wikipedia URL (``wikipedia_url``) and - Knowledge Graph MID (``mid``), if they are available. For the - metadata associated with other entity types, see the Type - table below. - salience: - The salience score associated with the entity in the [0, 1.0] - range. The salience score for an entity provides information - about the importance or centrality of that entity to the - entire document text. Scores closer to 0 are less salient, - while scores closer to 1.0 are highly salient. - mentions: - The mentions of this entity in the input document. The API - currently supports proper noun mentions. 
- sentiment: - For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq - uest.Features.extract_entity_sentiment][google.cloud.language. - v1.AnnotateTextRequest.Features.extract_entity_sentiment] is - set to true, this field will contain the aggregate sentiment - expressed for this entity in the provided document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Entity) - }, -) -_sym_db.RegisterMessage(Entity) -_sym_db.RegisterMessage(Entity.MetadataEntry) - -Token = _reflection.GeneratedProtocolMessageType( - "Token", - (_message.Message,), - { - "DESCRIPTOR": _TOKEN, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents the smallest syntactic building block of the text. - - Attributes: - text: - The token text. - part_of_speech: - Parts of speech tag for this token. - dependency_edge: - Dependency tree parse for this token. - lemma: - \ `Lemma - `__ of - the token. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Token) - }, -) -_sym_db.RegisterMessage(Token) - -Sentiment = _reflection.GeneratedProtocolMessageType( - "Sentiment", - (_message.Message,), - { - "DESCRIPTOR": _SENTIMENT, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents the feeling associated with the entire text or entities in - the text. - - Attributes: - magnitude: - A non-negative number in the [0, +inf) range, which represents - the absolute magnitude of sentiment regardless of score - (positive or negative). - score: - Sentiment score between -1.0 (negative sentiment) and 1.0 - (positive sentiment). - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Sentiment) - }, -) -_sym_db.RegisterMessage(Sentiment) - -PartOfSpeech = _reflection.GeneratedProtocolMessageType( - "PartOfSpeech", - (_message.Message,), - { - "DESCRIPTOR": _PARTOFSPEECH, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents part of speech information for a token. Parts of speech are - as defined in http://www.lrec- - conf.org/proceedings/lrec2012/pdf/274_Paper.pdf - - Attributes: - tag: - The part of speech tag. - aspect: - The grammatical aspect. - case: - The grammatical case. - form: - The grammatical form. - gender: - The grammatical gender. - mood: - The grammatical mood. - number: - The grammatical number. - person: - The grammatical person. - proper: - The grammatical properness. - reciprocity: - The grammatical reciprocity. - tense: - The grammatical tense. - voice: - The grammatical voice. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.PartOfSpeech) - }, -) -_sym_db.RegisterMessage(PartOfSpeech) - -DependencyEdge = _reflection.GeneratedProtocolMessageType( - "DependencyEdge", - (_message.Message,), - { - "DESCRIPTOR": _DEPENDENCYEDGE, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents dependency parse tree information for a token. (For more - information on dependency labels, see - http://www.aclweb.org/anthology/P13-2017 - - Attributes: - head_token_index: - Represents the head of this token in the dependency tree. This - is the index of the token which has an arc going to this - token. The index is the position of the token in the array of - tokens returned by the API method. If this token is a root - token, then the ``head_token_index`` is its own index. - label: - The parse label for the token. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.DependencyEdge) - }, -) -_sym_db.RegisterMessage(DependencyEdge) - -EntityMention = _reflection.GeneratedProtocolMessageType( - "EntityMention", - (_message.Message,), - { - "DESCRIPTOR": _ENTITYMENTION, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents a mention for an entity in the text. Currently, proper noun - mentions are supported. - - Attributes: - text: - The mention text. - type: - The type of the entity mention. - sentiment: - For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq - uest.Features.extract_entity_sentiment][google.cloud.language. - v1.AnnotateTextRequest.Features.extract_entity_sentiment] is - set to true, this field will contain the sentiment expressed - for this mention of the entity in the provided document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.EntityMention) - }, -) -_sym_db.RegisterMessage(EntityMention) - -TextSpan = _reflection.GeneratedProtocolMessageType( - "TextSpan", - (_message.Message,), - { - "DESCRIPTOR": _TEXTSPAN, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents an output piece of text. - - Attributes: - content: - The content of the output text. - begin_offset: - The API calculates the beginning offset of the content in the - original document according to the - [EncodingType][google.cloud.language.v1.EncodingType] - specified in the API request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.TextSpan) - }, -) -_sym_db.RegisterMessage(TextSpan) - -ClassificationCategory = _reflection.GeneratedProtocolMessageType( - "ClassificationCategory", - (_message.Message,), - { - "DESCRIPTOR": _CLASSIFICATIONCATEGORY, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """Represents a category returned from the text classifier. - - Attributes: - name: - The name of the category representing the document, from the - `predefined taxonomy `__. - confidence: - The classifier’s confidence of the category. Number represents - how certain the classifier is that this category represents - the given text. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.ClassificationCategory) - }, -) -_sym_db.RegisterMessage(ClassificationCategory) - -AnalyzeSentimentRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzeSentimentRequest", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZESENTIMENTREQUEST, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The sentiment analysis request message. - - Attributes: - document: - Input document. - encoding_type: - The encoding type used by the API to calculate sentence - offsets. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSentimentRequest) - }, -) -_sym_db.RegisterMessage(AnalyzeSentimentRequest) - -AnalyzeSentimentResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzeSentimentResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZESENTIMENTRESPONSE, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The sentiment analysis response message. - - Attributes: - document_sentiment: - The overall sentiment of the input document. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. 
See [Document.language][googl - e.cloud.language.v1.Document.language] field for more details. - sentences: - The sentiment for all the sentences in the document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSentimentResponse) - }, -) -_sym_db.RegisterMessage(AnalyzeSentimentResponse) - -AnalyzeEntitySentimentRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzeEntitySentimentRequest", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEENTITYSENTIMENTREQUEST, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The entity-level sentiment analysis request message. - - Attributes: - document: - Input document. - encoding_type: - The encoding type used by the API to calculate offsets. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitySentimentRequest) - }, -) -_sym_db.RegisterMessage(AnalyzeEntitySentimentRequest) - -AnalyzeEntitySentimentResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzeEntitySentimentResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEENTITYSENTIMENTRESPONSE, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The entity-level sentiment analysis response message. - - Attributes: - entities: - The recognized entities in the input document with associated - sentiments. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1.Document.language] field for more details. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitySentimentResponse) - }, -) -_sym_db.RegisterMessage(AnalyzeEntitySentimentResponse) - -AnalyzeEntitiesRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzeEntitiesRequest", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEENTITIESREQUEST, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The entity analysis request message. - - Attributes: - document: - Input document. - encoding_type: - The encoding type used by the API to calculate offsets. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitiesRequest) - }, -) -_sym_db.RegisterMessage(AnalyzeEntitiesRequest) - -AnalyzeEntitiesResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzeEntitiesResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEENTITIESRESPONSE, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The entity analysis response message. - - Attributes: - entities: - The recognized entities in the input document. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1.Document.language] field for more details. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitiesResponse) - }, -) -_sym_db.RegisterMessage(AnalyzeEntitiesResponse) - -AnalyzeSyntaxRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzeSyntaxRequest", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZESYNTAXREQUEST, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The syntax analysis request message. - - Attributes: - document: - Input document. 
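The `Sentiment` score/magnitude semantics and the `AnalyzeSentiment*` request/response messages above correspond to `analyze_sentiment` on the client. A rough illustration (the review text is made up): score sits in [-1.0, 1.0], while magnitude is in [0, +inf) and ignores polarity.

```python
from google.cloud import language_v1

client = language_v1.LanguageServiceClient()
document = language_v1.Document(
    content="The food was excellent, but the service was painfully slow.",
    type_=language_v1.Document.Type.PLAIN_TEXT,
)

response = client.analyze_sentiment(document=document)
print(
    response.language,
    response.document_sentiment.score,      # overall polarity, -1.0 .. 1.0
    response.document_sentiment.magnitude,  # overall strength, 0 .. +inf
)
for sentence in response.sentences:
    print(sentence.text.content, sentence.sentiment.score)
```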
- encoding_type: - The encoding type used by the API to calculate offsets. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSyntaxRequest) - }, -) -_sym_db.RegisterMessage(AnalyzeSyntaxRequest) - -AnalyzeSyntaxResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzeSyntaxResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZESYNTAXRESPONSE, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The syntax analysis response message. - - Attributes: - sentences: - Sentences in the input document. - tokens: - Tokens, along with their syntactic information, in the input - document. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1.Document.language] field for more details. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSyntaxResponse) - }, -) -_sym_db.RegisterMessage(AnalyzeSyntaxResponse) - -ClassifyTextRequest = _reflection.GeneratedProtocolMessageType( - "ClassifyTextRequest", - (_message.Message,), - { - "DESCRIPTOR": _CLASSIFYTEXTREQUEST, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The document classification request message. - - Attributes: - document: - Input document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.ClassifyTextRequest) - }, -) -_sym_db.RegisterMessage(ClassifyTextRequest) - -ClassifyTextResponse = _reflection.GeneratedProtocolMessageType( - "ClassifyTextResponse", - (_message.Message,), - { - "DESCRIPTOR": _CLASSIFYTEXTRESPONSE, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The document classification response message. - - Attributes: - categories: - Categories representing the input document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.ClassifyTextResponse) - }, -) -_sym_db.RegisterMessage(ClassifyTextResponse) - -AnnotateTextRequest = _reflection.GeneratedProtocolMessageType( - "AnnotateTextRequest", - (_message.Message,), - { - "Features": _reflection.GeneratedProtocolMessageType( - "Features", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATETEXTREQUEST_FEATURES, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """All available features for sentiment, syntax, and semantic analysis. - Setting each one to true will enable that specific analysis for the - input. - - Attributes: - extract_syntax: - Extract syntax information. - extract_entities: - Extract entities. - extract_document_sentiment: - Extract document-level sentiment. - extract_entity_sentiment: - Extract entities and their associated sentiment. - classify_text: - Classify the full document into categories. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextRequest.Features) - }, - ), - "DESCRIPTOR": _ANNOTATETEXTREQUEST, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The request message for the text annotation API, which can perform - multiple analysis types (sentiment, entities, and syntax) in one call. - - Attributes: - document: - Input document. - features: - The enabled features. - encoding_type: - The encoding type used by the API to calculate offsets. 
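`AnnotateTextRequest.Features` is the toggle set described above; `annotate_text` runs whichever analyses are switched on in a single round trip (the diff's method signature flattens `document`, `features` and `encoding_type`). The feature selection below is illustrative, not prescriptive:

```python
from google.cloud import language_v1

client = language_v1.LanguageServiceClient()
document = language_v1.Document(
    content="Larry Page and Sergey Brin founded Google in 1998.",
    type_=language_v1.Document.Type.PLAIN_TEXT,
)
features = language_v1.AnnotateTextRequest.Features(
    extract_syntax=True,
    extract_entities=True,
    extract_document_sentiment=True,
)

response = client.annotate_text(document=document, features=features)
print(len(response.tokens), len(response.entities), response.document_sentiment.score)
```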
- """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextRequest) - }, -) -_sym_db.RegisterMessage(AnnotateTextRequest) -_sym_db.RegisterMessage(AnnotateTextRequest.Features) - -AnnotateTextResponse = _reflection.GeneratedProtocolMessageType( - "AnnotateTextResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATETEXTRESPONSE, - "__module__": "google.cloud.language_v1.proto.language_service_pb2", - "__doc__": """The text annotations response message. - - Attributes: - sentences: - Sentences in the input document. Populated if the user enables - [AnnotateTextRequest.Features.extract_syntax][google.cloud.lan - guage.v1.AnnotateTextRequest.Features.extract_syntax]. - tokens: - Tokens, along with their syntactic information, in the input - document. Populated if the user enables [AnnotateTextRequest.F - eatures.extract_syntax][google.cloud.language.v1.AnnotateTextR - equest.Features.extract_syntax]. - entities: - Entities, along with their semantic information, in the input - document. Populated if the user enables [AnnotateTextRequest.F - eatures.extract_entities][google.cloud.language.v1.AnnotateTex - tRequest.Features.extract_entities]. - document_sentiment: - The overall sentiment for the document. Populated if the user - enables [AnnotateTextRequest.Features.extract_document_sentime - nt][google.cloud.language.v1.AnnotateTextRequest.Features.extr - act_document_sentiment]. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1.Document.language] field for more details. - categories: - Categories identified in the input document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextResponse) - }, -) -_sym_db.RegisterMessage(AnnotateTextResponse) - - -DESCRIPTOR._options = None -_ENTITY_METADATAENTRY._options = None -_ANALYZESENTIMENTREQUEST.fields_by_name["document"]._options = None -_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name["document"]._options = None -_ANALYZEENTITIESREQUEST.fields_by_name["document"]._options = None -_ANALYZESYNTAXREQUEST.fields_by_name["document"]._options = None -_CLASSIFYTEXTREQUEST.fields_by_name["document"]._options = None -_ANNOTATETEXTREQUEST.fields_by_name["document"]._options = None -_ANNOTATETEXTREQUEST.fields_by_name["features"]._options = None - -_LANGUAGESERVICE = _descriptor.ServiceDescriptor( - name="LanguageService", - full_name="google.cloud.language.v1.LanguageService", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\027language.googleapis.com\322A]https://www.googleapis.com/auth/cloud-language,https://www.googleapis.com/auth/cloud-platform", - create_key=_descriptor._internal_create_key, - serialized_start=6801, - serialized_end=8129, - methods=[ - _descriptor.MethodDescriptor( - name="AnalyzeSentiment", - full_name="google.cloud.language.v1.LanguageService.AnalyzeSentiment", - index=0, - containing_service=None, - input_type=_ANALYZESENTIMENTREQUEST, - output_type=_ANALYZESENTIMENTRESPONSE, - serialized_options=b'\202\323\344\223\002#"\036/v1/documents:analyzeSentiment:\001*\332A\026document,encoding_type\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AnalyzeEntities", - full_name="google.cloud.language.v1.LanguageService.AnalyzeEntities", - index=1, - containing_service=None, - input_type=_ANALYZEENTITIESREQUEST, - 
output_type=_ANALYZEENTITIESRESPONSE, - serialized_options=b'\202\323\344\223\002""\035/v1/documents:analyzeEntities:\001*\332A\026document,encoding_type\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AnalyzeEntitySentiment", - full_name="google.cloud.language.v1.LanguageService.AnalyzeEntitySentiment", - index=2, - containing_service=None, - input_type=_ANALYZEENTITYSENTIMENTREQUEST, - output_type=_ANALYZEENTITYSENTIMENTRESPONSE, - serialized_options=b'\202\323\344\223\002)"$/v1/documents:analyzeEntitySentiment:\001*\332A\026document,encoding_type\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AnalyzeSyntax", - full_name="google.cloud.language.v1.LanguageService.AnalyzeSyntax", - index=3, - containing_service=None, - input_type=_ANALYZESYNTAXREQUEST, - output_type=_ANALYZESYNTAXRESPONSE, - serialized_options=b'\202\323\344\223\002 "\033/v1/documents:analyzeSyntax:\001*\332A\026document,encoding_type\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ClassifyText", - full_name="google.cloud.language.v1.LanguageService.ClassifyText", - index=4, - containing_service=None, - input_type=_CLASSIFYTEXTREQUEST, - output_type=_CLASSIFYTEXTRESPONSE, - serialized_options=b'\202\323\344\223\002\037"\032/v1/documents:classifyText:\001*\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AnnotateText", - full_name="google.cloud.language.v1.LanguageService.AnnotateText", - index=5, - containing_service=None, - input_type=_ANNOTATETEXTREQUEST, - output_type=_ANNOTATETEXTRESPONSE, - serialized_options=b'\202\323\344\223\002\037"\032/v1/documents:annotateText:\001*\332A\037document,features,encoding_type\332A\021document,features', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_LANGUAGESERVICE) - -DESCRIPTOR.services_by_name["LanguageService"] = _LANGUAGESERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/language_v1/proto/language_service_pb2_grpc.py b/google/cloud/language_v1/proto/language_service_pb2_grpc.py deleted file mode 100644 index 40a7da30..00000000 --- a/google/cloud/language_v1/proto/language_service_pb2_grpc.py +++ /dev/null @@ -1,142 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.language_v1.proto import ( - language_service_pb2 as google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2, -) - - -class LanguageServiceStub(object): - """Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
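For context on what is being removed: `LanguageServiceStub` was the low-level gRPC stub callers could wire to their own channel before this PR; afterwards the equivalent plumbing lives in the GAPIC transports. A hedged sketch of that older pattern, with the endpoint and OAuth scope taken from the service descriptor above:

```python
import google.auth
import google.auth.transport.grpc
import google.auth.transport.requests
from google.cloud.language_v1.proto import (  # module deleted by this PR
    language_service_pb2,
    language_service_pb2_grpc,
)

credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-language"]
)
channel = google.auth.transport.grpc.secure_authorized_channel(
    credentials,
    google.auth.transport.requests.Request(),
    "language.googleapis.com:443",
)

stub = language_service_pb2_grpc.LanguageServiceStub(channel)
request = language_service_pb2.AnalyzeSentimentRequest(
    document=language_service_pb2.Document(
        content="Hello world", type=language_service_pb2.Document.PLAIN_TEXT
    )
)
response = stub.AnalyzeSentiment(request)
```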
- """ - self.AnalyzeSentiment = channel.unary_unary( - "/google.cloud.language.v1.LanguageService/AnalyzeSentiment", - request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, - ) - self.AnalyzeEntities = channel.unary_unary( - "/google.cloud.language.v1.LanguageService/AnalyzeEntities", - request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, - ) - self.AnalyzeEntitySentiment = channel.unary_unary( - "/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment", - request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentResponse.FromString, - ) - self.AnalyzeSyntax = channel.unary_unary( - "/google.cloud.language.v1.LanguageService/AnalyzeSyntax", - request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, - ) - self.ClassifyText = channel.unary_unary( - "/google.cloud.language.v1.LanguageService/ClassifyText", - request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.ClassifyTextRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.ClassifyTextResponse.FromString, - ) - self.AnnotateText = channel.unary_unary( - "/google.cloud.language.v1.LanguageService/AnnotateText", - request_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnnotateTextResponse.FromString, - ) - - -class LanguageServiceServicer(object): - """Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - def AnalyzeSentiment(self, request, context): - """Analyzes the sentiment of the provided text. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AnalyzeEntities(self, request, context): - """Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AnalyzeEntitySentiment(self, request, context): - """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] in the text and analyzes - sentiment associated with each entity and its mentions. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AnalyzeSyntax(self, request, context): - """Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part of speech tags, dependency trees, and other - properties. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ClassifyText(self, request, context): - """Classifies a document into categories. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AnnotateText(self, request, context): - """A convenience method that provides all the features that analyzeSentiment, - analyzeEntities, and analyzeSyntax provide in one call. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_LanguageServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "AnalyzeSentiment": grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeSentiment, - request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, - ), - "AnalyzeEntities": grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeEntities, - request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, - ), - "AnalyzeEntitySentiment": grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeEntitySentiment, - request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentResponse.SerializeToString, - ), - "AnalyzeSyntax": grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeSyntax, - request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, - ), - "ClassifyText": grpc.unary_unary_rpc_method_handler( - servicer.ClassifyText, - request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.ClassifyTextRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.ClassifyTextResponse.SerializeToString, - ), - "AnnotateText": grpc.unary_unary_rpc_method_handler( - servicer.AnnotateText, - request_deserializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnnotateTextRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1_dot_proto_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.language.v1.LanguageService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git 
a/google/cloud/language_v1/services/language_service/async_client.py b/google/cloud/language_v1/services/language_service/async_client.py index e0a357ea..2fd88f1e 100644 --- a/google/cloud/language_v1/services/language_service/async_client.py +++ b/google/cloud/language_v1/services/language_service/async_client.py @@ -74,8 +74,36 @@ class LanguageServiceAsyncClient: LanguageServiceClient.parse_common_location_path ) - from_service_account_info = LanguageServiceClient.from_service_account_info - from_service_account_file = LanguageServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -211,6 +239,7 @@ async def analyze_sentiment( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -294,6 +323,7 @@ async def analyze_entities( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -380,6 +410,7 @@ async def analyze_entity_sentiment( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -462,6 +493,7 @@ async def analyze_syntax( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -535,6 +567,7 @@ async def classify_text( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -629,6 +662,7 @@ async def annotate_text( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/language_v1/services/language_service/client.py b/google/cloud/language_v1/services/language_service/client.py index 2e54333c..0856292b 100644 --- a/google/cloud/language_v1/services/language_service/client.py +++ b/google/cloud/language_v1/services/language_service/client.py @@ -270,21 +270,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + 
client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -327,7 +323,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/google/cloud/language_v1/services/language_service/transports/base.py b/google/cloud/language_v1/services/language_service/transports/base.py index 20f77df4..1add68ea 100644 --- a/google/cloud/language_v1/services/language_service/transports/base.py +++ b/google/cloud/language_v1/services/language_service/transports/base.py @@ -70,10 +70,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -81,6 +81,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -90,20 +93,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
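The `from_service_account_info` / `from_service_account_file` classmethods added to the async client above give it the same convenience constructors as the sync client, with `from_service_account_json` kept as an alias. Roughly, assuming a key file at a hypothetical path:

```python
import json

from google.cloud import language_v1

# Sync and async clients now expose the same convenience constructors.
sync_client = language_v1.LanguageServiceClient.from_service_account_file(
    "service-account.json"  # hypothetical path to a service-account key
)
async_client = language_v1.LanguageServiceAsyncClient.from_service_account_file(
    "service-account.json"
)

# from_service_account_info takes the already-parsed key as a dict.
with open("service-account.json") as fp:
    info = json.load(fp)
another_async_client = language_v1.LanguageServiceAsyncClient.from_service_account_info(info)
```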
self._wrapped_methods = { @@ -116,6 +116,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -129,6 +130,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -142,6 +144,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -155,6 +158,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -168,6 +172,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -181,6 +186,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, diff --git a/google/cloud/language_v1/services/language_service/transports/grpc.py b/google/cloud/language_v1/services/language_service/transports/grpc.py index da9f57a5..fe382136 100644 --- a/google/cloud/language_v1/services/language_service/transports/grpc.py +++ b/google/cloud/language_v1/services/language_service/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -88,6 +89,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -102,72 +107,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
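The `client_cert_source_for_mtls` plumbing above replaces the transport building its own `SslCredentials`: the cert-source callback now flows from `client_options` through the client into the transport, and is only honoured when the opt-in environment variable is set. A hedged sketch of the caller-facing side; the callback and PEM paths are hypothetical:

```python
import os

from google.api_core.client_options import ClientOptions
from google.cloud import language_v1

# Mutual TLS is opt-in; without this variable the callback is ignored.
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"

def client_cert_source():
    # Must return (certificate_chain, private_key) as PEM-encoded bytes.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()

client = language_v1.LanguageServiceClient(
    client_options=ClientOptions(client_cert_source=client_cert_source)
)
```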
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -175,17 +168,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -199,7 +183,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py index 299b7c95..a262a657 100644 --- a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py +++ b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py @@ -62,7 +62,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -102,6 +102,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -133,12 +134,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -147,72 +152,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -220,17 +213,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/language_v1/types.py b/google/cloud/language_v1/types.py deleted file mode 100644 index 75882942..00000000 --- a/google/cloud/language_v1/types.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import -import sys - -from google.api import http_pb2 -from google.protobuf import descriptor_pb2 - -from google.api_core.protobuf_helpers import get_messages -from google.cloud.language_v1.proto import language_service_pb2 - - -_shared_modules = [http_pb2, descriptor_pb2] - -_local_modules = [language_service_pb2] - -names = [] - -for module in _shared_modules: - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) - -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.language_v1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - -__all__ = tuple(sorted(names)) diff --git a/google/cloud/language_v1/types/__init__.py b/google/cloud/language_v1/types/__init__.py index 4598667d..025cbb98 100644 --- a/google/cloud/language_v1/types/__init__.py +++ b/google/cloud/language_v1/types/__init__.py @@ -16,53 +16,53 @@ # from .language_service import ( - Document, - Sentence, - Entity, - Token, - Sentiment, - PartOfSpeech, - DependencyEdge, - EntityMention, - TextSpan, - ClassificationCategory, - AnalyzeSentimentRequest, - AnalyzeSentimentResponse, - AnalyzeEntitySentimentRequest, - AnalyzeEntitySentimentResponse, AnalyzeEntitiesRequest, AnalyzeEntitiesResponse, + AnalyzeEntitySentimentRequest, + AnalyzeEntitySentimentResponse, + AnalyzeSentimentRequest, + AnalyzeSentimentResponse, AnalyzeSyntaxRequest, AnalyzeSyntaxResponse, - ClassifyTextRequest, - ClassifyTextResponse, AnnotateTextRequest, AnnotateTextResponse, + ClassificationCategory, + ClassifyTextRequest, + ClassifyTextResponse, + DependencyEdge, + Document, + Entity, + EntityMention, + PartOfSpeech, + Sentence, + Sentiment, + TextSpan, + Token, EncodingType, ) __all__ = ( - "Document", - "Sentence", - "Entity", - "Token", - "Sentiment", - "PartOfSpeech", - "DependencyEdge", - "EntityMention", - "TextSpan", - "ClassificationCategory", - "AnalyzeSentimentRequest", - "AnalyzeSentimentResponse", - "AnalyzeEntitySentimentRequest", - "AnalyzeEntitySentimentResponse", "AnalyzeEntitiesRequest", "AnalyzeEntitiesResponse", + "AnalyzeEntitySentimentRequest", + "AnalyzeEntitySentimentResponse", + "AnalyzeSentimentRequest", + "AnalyzeSentimentResponse", "AnalyzeSyntaxRequest", "AnalyzeSyntaxResponse", - "ClassifyTextRequest", - "ClassifyTextResponse", "AnnotateTextRequest", "AnnotateTextResponse", + "ClassificationCategory", + "ClassifyTextRequest", + "ClassifyTextResponse", + "DependencyEdge", + "Document", + "Entity", + "EntityMention", + "PartOfSpeech", + "Sentence", + "Sentiment", + "TextSpan", + "Token", "EncodingType", ) diff --git a/google/cloud/language_v1beta2/gapic/__init__.py b/google/cloud/language_v1beta2/gapic/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/language_v1beta2/gapic/enums.py b/google/cloud/language_v1beta2/gapic/enums.py deleted file mode 100644 index f6a7be9e..00000000 --- a/google/cloud/language_v1beta2/gapic/enums.py +++ /dev/null @@ -1,598 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class EncodingType(enum.IntEnum): - """ - Represents the text encoding that the caller uses to process the - output. Providing an ``EncodingType`` is recommended because the API - provides the beginning offsets for various outputs, such as tokens and - mentions, and languages that natively use different text encodings may - access offsets differently. - - Attributes: - NONE (int): If ``EncodingType`` is not specified, encoding-dependent information - (such as ``begin_offset``) will be set at ``-1``. - UTF8 (int): Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-8 encoding of the input. C++ and Go are - examples of languages that use this encoding natively. - UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-16 encoding of the input. Java and - JavaScript are examples of languages that use this encoding natively. - UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is - calculated based on the UTF-32 encoding of the input. Python is an - example of a language that uses this encoding natively. - """ - - NONE = 0 - UTF8 = 1 - UTF16 = 2 - UTF32 = 3 - - -class DependencyEdge(object): - class Label(enum.IntEnum): - """ - The parse label enum for the token. - - Attributes: - UNKNOWN (int): Unknown - ABBREV (int): Abbreviation modifier - ACOMP (int): Adjectival complement - ADVCL (int): Adverbial clause modifier - ADVMOD (int): Adverbial modifier - AMOD (int): Adjectival modifier of an NP - APPOS (int): Appositional modifier of an NP - ATTR (int): Attribute dependent of a copular verb - AUX (int): Auxiliary (non-main) verb - AUXPASS (int): Passive auxiliary - CC (int): Coordinating conjunction - CCOMP (int): Clausal complement of a verb or adjective - CONJ (int): Conjunct - CSUBJ (int): Clausal subject - CSUBJPASS (int): Clausal passive subject - DEP (int): Dependency (unable to determine) - DET (int): Determiner - DISCOURSE (int): Discourse - DOBJ (int): Direct object - EXPL (int): Expletive - GOESWITH (int): Goes with (part of a word in a text not well edited) - IOBJ (int): Indirect object - MARK (int): Marker (word introducing a subordinate clause) - MWE (int): Multi-word expression - MWV (int): Multi-word verbal expression - NEG (int): Negation modifier - NN (int): Noun compound modifier - NPADVMOD (int): Noun phrase used as an adverbial modifier - NSUBJ (int): Nominal subject - NSUBJPASS (int): Passive nominal subject - NUM (int): Numeric modifier of a noun - NUMBER (int): Element of compound number - P (int): Punctuation mark - PARATAXIS (int): Parataxis relation - PARTMOD (int): Participial modifier - PCOMP (int): The complement of a preposition is a clause - POBJ (int): Object of a preposition - POSS (int): Possession modifier - POSTNEG (int): Postverbal negative particle - PRECOMP (int): Predicate complement - PRECONJ (int): Preconjunt - PREDET (int): Predeterminer - PREF (int): Prefix - PREP (int): Prepositional modifier - PRONL (int): The relationship between a verb and verbal morpheme - PRT 
(int): Particle - PS (int): Associative or possessive marker - QUANTMOD (int): Quantifier phrase modifier - RCMOD (int): Relative clause modifier - RCMODREL (int): Complementizer in relative clause - RDROP (int): Ellipsis without a preceding predicate - REF (int): Referent - REMNANT (int): Remnant - REPARANDUM (int): Reparandum - ROOT (int): Root - SNUM (int): Suffix specifying a unit of number - SUFF (int): Suffix - TMOD (int): Temporal modifier - TOPIC (int): Topic marker - VMOD (int): Clause headed by an infinite form of the verb that modifies a noun - VOCATIVE (int): Vocative - XCOMP (int): Open clausal complement - SUFFIX (int): Name suffix - TITLE (int): Name title - ADVPHMOD (int): Adverbial phrase modifier - AUXCAUS (int): Causative auxiliary - AUXVV (int): Helper auxiliary - DTMOD (int): Rentaishi (Prenominal modifier) - FOREIGN (int): Foreign words - KW (int): Keyword - LIST (int): List for chains of comparable items - NOMC (int): Nominalized clause - NOMCSUBJ (int): Nominalized clausal subject - NOMCSUBJPASS (int): Nominalized clausal passive - NUMC (int): Compound of numeric modifier - COP (int): Copula - DISLOCATED (int): Dislocated relation (for fronted/topicalized elements) - ASP (int): Aspect marker - GMOD (int): Genitive modifier - GOBJ (int): Genitive object - INFMOD (int): Infinitival modifier - MES (int): Measure - NCOMP (int): Nominal complement of a noun - """ - - UNKNOWN = 0 - ABBREV = 1 - ACOMP = 2 - ADVCL = 3 - ADVMOD = 4 - AMOD = 5 - APPOS = 6 - ATTR = 7 - AUX = 8 - AUXPASS = 9 - CC = 10 - CCOMP = 11 - CONJ = 12 - CSUBJ = 13 - CSUBJPASS = 14 - DEP = 15 - DET = 16 - DISCOURSE = 17 - DOBJ = 18 - EXPL = 19 - GOESWITH = 20 - IOBJ = 21 - MARK = 22 - MWE = 23 - MWV = 24 - NEG = 25 - NN = 26 - NPADVMOD = 27 - NSUBJ = 28 - NSUBJPASS = 29 - NUM = 30 - NUMBER = 31 - P = 32 - PARATAXIS = 33 - PARTMOD = 34 - PCOMP = 35 - POBJ = 36 - POSS = 37 - POSTNEG = 38 - PRECOMP = 39 - PRECONJ = 40 - PREDET = 41 - PREF = 42 - PREP = 43 - PRONL = 44 - PRT = 45 - PS = 46 - QUANTMOD = 47 - RCMOD = 48 - RCMODREL = 49 - RDROP = 50 - REF = 51 - REMNANT = 52 - REPARANDUM = 53 - ROOT = 54 - SNUM = 55 - SUFF = 56 - TMOD = 57 - TOPIC = 58 - VMOD = 59 - VOCATIVE = 60 - XCOMP = 61 - SUFFIX = 62 - TITLE = 63 - ADVPHMOD = 64 - AUXCAUS = 65 - AUXVV = 66 - DTMOD = 67 - FOREIGN = 68 - KW = 69 - LIST = 70 - NOMC = 71 - NOMCSUBJ = 72 - NOMCSUBJPASS = 73 - NUMC = 74 - COP = 75 - DISLOCATED = 76 - ASP = 77 - GMOD = 78 - GOBJ = 79 - INFMOD = 80 - MES = 81 - NCOMP = 82 - - -class Document(object): - class Type(enum.IntEnum): - """ - The document types enum. - - Attributes: - TYPE_UNSPECIFIED (int): The content type is not specified. - PLAIN_TEXT (int): Plain text - HTML (int): HTML - """ - - TYPE_UNSPECIFIED = 0 - PLAIN_TEXT = 1 - HTML = 2 - - -class Entity(object): - class Type(enum.IntEnum): - """ - The type of the entity. For most entity types, the associated - metadata is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph MID - (``mid``). The table below lists the associated fields for entities that - have different metadata. 
- - Attributes: - UNKNOWN (int): Unknown - PERSON (int): Person - LOCATION (int): Location - ORGANIZATION (int): Organization - EVENT (int): Event - WORK_OF_ART (int): Artwork - CONSUMER_GOOD (int): Consumer product - OTHER (int): Other types of entities - PHONE_NUMBER (int): Phone number - - The metadata lists the phone number, formatted according to local - convention, plus whichever additional elements appear in the text: - - - ``number`` - the actual number, broken down into sections as per - local convention - - ``national_prefix`` - country code, if detected - - ``area_code`` - region or area code, if detected - - ``extension`` - phone extension (to be dialed after connection), if - detected - ADDRESS (int): Address - - The metadata identifies the street number and locality plus whichever - additional elements appear in the text: - - - ``street_number`` - street number - - ``locality`` - city or town - - ``street_name`` - street/route name, if detected - - ``postal_code`` - postal code, if detected - - ``country`` - country, if detected< - - ``broad_region`` - administrative area, such as the state, if - detected - - ``narrow_region`` - smaller administrative area, such as county, if - detected - - ``sublocality`` - used in Asian addresses to demark a district within - a city, if detected - DATE (int): Date - - The metadata identifies the components of the date: - - - ``year`` - four digit year, if detected - - ``month`` - two digit month number, if detected - - ``day`` - two digit day number, if detected - NUMBER (int): Number - - The metadata is the number itself. - PRICE (int): Price - - The metadata identifies the ``value`` and ``currency``. - """ - - UNKNOWN = 0 - PERSON = 1 - LOCATION = 2 - ORGANIZATION = 3 - EVENT = 4 - WORK_OF_ART = 5 - CONSUMER_GOOD = 6 - OTHER = 7 - PHONE_NUMBER = 9 - ADDRESS = 10 - DATE = 11 - NUMBER = 12 - PRICE = 13 - - -class EntityMention(object): - class Type(enum.IntEnum): - """ - The supported types of mentions. - - Attributes: - TYPE_UNKNOWN (int): Unknown - PROPER (int): Proper name - COMMON (int): Common noun (or noun compound) - """ - - TYPE_UNKNOWN = 0 - PROPER = 1 - COMMON = 2 - - -class PartOfSpeech(object): - class Aspect(enum.IntEnum): - """ - The characteristic of a verb that expresses time flow during an event. - - Attributes: - ASPECT_UNKNOWN (int): Aspect is not applicable in the analyzed language or is not predicted. - PERFECTIVE (int): Perfective - IMPERFECTIVE (int): Imperfective - PROGRESSIVE (int): Progressive - """ - - ASPECT_UNKNOWN = 0 - PERFECTIVE = 1 - IMPERFECTIVE = 2 - PROGRESSIVE = 3 - - class Case(enum.IntEnum): - """ - The grammatical function performed by a noun or pronoun in a phrase, - clause, or sentence. In some languages, other parts of speech, such as - adjective and determiner, take case inflection in agreement with the noun. - - Attributes: - CASE_UNKNOWN (int): Case is not applicable in the analyzed language or is not predicted. 
- ACCUSATIVE (int): Accusative - ADVERBIAL (int): Adverbial - COMPLEMENTIVE (int): Complementive - DATIVE (int): Dative - GENITIVE (int): Genitive - INSTRUMENTAL (int): Instrumental - LOCATIVE (int): Locative - NOMINATIVE (int): Nominative - OBLIQUE (int): Oblique - PARTITIVE (int): Partitive - PREPOSITIONAL (int): Prepositional - REFLEXIVE_CASE (int): Reflexive - RELATIVE_CASE (int): Relative - VOCATIVE (int): Vocative - """ - - CASE_UNKNOWN = 0 - ACCUSATIVE = 1 - ADVERBIAL = 2 - COMPLEMENTIVE = 3 - DATIVE = 4 - GENITIVE = 5 - INSTRUMENTAL = 6 - LOCATIVE = 7 - NOMINATIVE = 8 - OBLIQUE = 9 - PARTITIVE = 10 - PREPOSITIONAL = 11 - REFLEXIVE_CASE = 12 - RELATIVE_CASE = 13 - VOCATIVE = 14 - - class Form(enum.IntEnum): - """ - Depending on the language, Form can be categorizing different forms of - verbs, adjectives, adverbs, etc. For example, categorizing inflected - endings of verbs and adjectives or distinguishing between short and long - forms of adjectives and participles - - Attributes: - FORM_UNKNOWN (int): Form is not applicable in the analyzed language or is not predicted. - ADNOMIAL (int): Adnomial - AUXILIARY (int): Auxiliary - COMPLEMENTIZER (int): Complementizer - FINAL_ENDING (int): Final ending - GERUND (int): Gerund - REALIS (int): Realis - IRREALIS (int): Irrealis - SHORT (int): Short form - LONG (int): Long form - ORDER (int): Order form - SPECIFIC (int): Specific form - """ - - FORM_UNKNOWN = 0 - ADNOMIAL = 1 - AUXILIARY = 2 - COMPLEMENTIZER = 3 - FINAL_ENDING = 4 - GERUND = 5 - REALIS = 6 - IRREALIS = 7 - SHORT = 8 - LONG = 9 - ORDER = 10 - SPECIFIC = 11 - - class Gender(enum.IntEnum): - """ - Gender classes of nouns reflected in the behaviour of associated words. - - Attributes: - GENDER_UNKNOWN (int): Gender is not applicable in the analyzed language or is not predicted. - FEMININE (int): Feminine - MASCULINE (int): Masculine - NEUTER (int): Neuter - """ - - GENDER_UNKNOWN = 0 - FEMININE = 1 - MASCULINE = 2 - NEUTER = 3 - - class Mood(enum.IntEnum): - """ - The grammatical feature of verbs, used for showing modality and attitude. - - Attributes: - MOOD_UNKNOWN (int): Mood is not applicable in the analyzed language or is not predicted. - CONDITIONAL_MOOD (int): Conditional - IMPERATIVE (int): Imperative - INDICATIVE (int): Indicative - INTERROGATIVE (int): Interrogative - JUSSIVE (int): Jussive - SUBJUNCTIVE (int): Subjunctive - """ - - MOOD_UNKNOWN = 0 - CONDITIONAL_MOOD = 1 - IMPERATIVE = 2 - INDICATIVE = 3 - INTERROGATIVE = 4 - JUSSIVE = 5 - SUBJUNCTIVE = 6 - - class Number(enum.IntEnum): - """ - Count distinctions. - - Attributes: - NUMBER_UNKNOWN (int): Number is not applicable in the analyzed language or is not predicted. - SINGULAR (int): Singular - PLURAL (int): Plural - DUAL (int): Dual - """ - - NUMBER_UNKNOWN = 0 - SINGULAR = 1 - PLURAL = 2 - DUAL = 3 - - class Person(enum.IntEnum): - """ - The distinction between the speaker, second person, third person, etc. - - Attributes: - PERSON_UNKNOWN (int): Person is not applicable in the analyzed language or is not predicted. - FIRST (int): First - SECOND (int): Second - THIRD (int): Third - REFLEXIVE_PERSON (int): Reflexive - """ - - PERSON_UNKNOWN = 0 - FIRST = 1 - SECOND = 2 - THIRD = 3 - REFLEXIVE_PERSON = 4 - - class Proper(enum.IntEnum): - """ - This category shows if the token is part of a proper name. - - Attributes: - PROPER_UNKNOWN (int): Proper is not applicable in the analyzed language or is not predicted. 
- PROPER (int): Proper - NOT_PROPER (int): Not proper - """ - - PROPER_UNKNOWN = 0 - PROPER = 1 - NOT_PROPER = 2 - - class Reciprocity(enum.IntEnum): - """ - Reciprocal features of a pronoun. - - Attributes: - RECIPROCITY_UNKNOWN (int): Reciprocity is not applicable in the analyzed language or is not - predicted. - RECIPROCAL (int): Reciprocal - NON_RECIPROCAL (int): Non-reciprocal - """ - - RECIPROCITY_UNKNOWN = 0 - RECIPROCAL = 1 - NON_RECIPROCAL = 2 - - class Tag(enum.IntEnum): - """ - The part of speech tags enum. - - Attributes: - UNKNOWN (int): Unknown - ADJ (int): Adjective - ADP (int): Adposition (preposition and postposition) - ADV (int): Adverb - CONJ (int): Conjunction - DET (int): Determiner - NOUN (int): Noun (common and proper) - NUM (int): Cardinal number - PRON (int): Pronoun - PRT (int): Particle or other function word - PUNCT (int): Punctuation - VERB (int): Verb (all tenses and modes) - X (int): Other: foreign words, typos, abbreviations - AFFIX (int): Affix - """ - - UNKNOWN = 0 - ADJ = 1 - ADP = 2 - ADV = 3 - CONJ = 4 - DET = 5 - NOUN = 6 - NUM = 7 - PRON = 8 - PRT = 9 - PUNCT = 10 - VERB = 11 - X = 12 - AFFIX = 13 - - class Tense(enum.IntEnum): - """ - Time reference. - - Attributes: - TENSE_UNKNOWN (int): Tense is not applicable in the analyzed language or is not predicted. - CONDITIONAL_TENSE (int): Conditional - FUTURE (int): Future - PAST (int): Past - PRESENT (int): Present - IMPERFECT (int): Imperfect - PLUPERFECT (int): Pluperfect - """ - - TENSE_UNKNOWN = 0 - CONDITIONAL_TENSE = 1 - FUTURE = 2 - PAST = 3 - PRESENT = 4 - IMPERFECT = 5 - PLUPERFECT = 6 - - class Voice(enum.IntEnum): - """ - The relationship between the action that a verb expresses and the - participants identified by its arguments. - - Attributes: - VOICE_UNKNOWN (int): Voice is not applicable in the analyzed language or is not predicted. - ACTIVE (int): Active - CAUSATIVE (int): Causative - PASSIVE (int): Passive - """ - - VOICE_UNKNOWN = 0 - ACTIVE = 1 - CAUSATIVE = 2 - PASSIVE = 3 diff --git a/google/cloud/language_v1beta2/gapic/language_service_client.py b/google/cloud/language_v1beta2/gapic/language_service_client.py deleted file mode 100644 index 8d3f9557..00000000 --- a/google/cloud/language_v1beta2/gapic/language_service_client.py +++ /dev/null @@ -1,581 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
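For reference, the following is a minimal sketch of how the IntEnum wrappers deleted above were typically consumed together with the hand-written client that is removed next in this diff. The import paths assume the aliases the pre-change package exposed (language_v1beta2.enums), and the response fields (tokens, part_of_speech.tag, dependency_edge.label, text.content) follow the proto definitions further down in this diff; the document text is only a placeholder.

from google.cloud import language_v1beta2
from google.cloud.language_v1beta2 import enums

client = language_v1beta2.LanguageServiceClient()
document = {
    "type": enums.Document.Type.PLAIN_TEXT,
    "content": "The quick brown fox jumped over the lazy dog.",
}

# analyze_syntax returns plain integer codes on the wire; the IntEnum
# classes defined in the deleted enums module turn them back into names.
response = client.analyze_syntax(document)
for token in response.tokens:
    tag = enums.PartOfSpeech.Tag(token.part_of_speech.tag)
    label = enums.DependencyEdge.Label(token.dependency_edge.label)
    print(token.text.content, tag.name, label.name)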
- -"""Accesses the google.cloud.language.v1beta2 LanguageService API.""" - -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.grpc_helpers -import grpc - -from google.cloud.language_v1beta2.gapic import enums -from google.cloud.language_v1beta2.gapic import language_service_client_config -from google.cloud.language_v1beta2.gapic.transports import ( - language_service_grpc_transport, -) -from google.cloud.language_v1beta2.proto import language_service_pb2 -from google.cloud.language_v1beta2.proto import language_service_pb2_grpc - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-language").version - - -class LanguageServiceClient(object): - """ - Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - SERVICE_ADDRESS = "language.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.language.v1beta2.LanguageService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - LanguageServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.LanguageServiceGrpcTransport, - Callable[[~.Credentials, type], ~.LanguageServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = language_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=language_service_grpc_transport.LanguageServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = language_service_grpc_transport.LanguageServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def analyze_sentiment( - self, - document, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Analyzes the sentiment of the provided text. - - Example: - >>> from google.cloud import language_v1beta2 - >>> - >>> client = language_v1beta2.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.analyze_sentiment(document) - - Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1beta2.types.Document` - encoding_type (~google.cloud.language_v1beta2.enums.EncodingType): The encoding type used by the API to calculate sentence offsets for the - sentence sentiment. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1beta2.types.AnalyzeSentimentResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "analyze_sentiment" not in self._inner_api_calls: - self._inner_api_calls[ - "analyze_sentiment" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.analyze_sentiment, - default_retry=self._method_configs["AnalyzeSentiment"].retry, - default_timeout=self._method_configs["AnalyzeSentiment"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnalyzeSentimentRequest( - document=document, encoding_type=encoding_type - ) - return self._inner_api_calls["analyze_sentiment"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def analyze_entities( - self, - document, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - - Example: - >>> from google.cloud import language_v1beta2 - >>> - >>> client = language_v1beta2.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.analyze_entities(document) - - Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1beta2.types.Document` - encoding_type (~google.cloud.language_v1beta2.enums.EncodingType): The encoding type used by the API to calculate offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1beta2.types.AnalyzeEntitiesResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "analyze_entities" not in self._inner_api_calls: - self._inner_api_calls[ - "analyze_entities" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.analyze_entities, - default_retry=self._method_configs["AnalyzeEntities"].retry, - default_timeout=self._method_configs["AnalyzeEntities"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnalyzeEntitiesRequest( - document=document, encoding_type=encoding_type - ) - return self._inner_api_calls["analyze_entities"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def analyze_entity_sentiment( - self, - document, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Finds entities, similar to ``AnalyzeEntities`` in the text and - analyzes sentiment associated with each entity and its mentions. - - Example: - >>> from google.cloud import language_v1beta2 - >>> - >>> client = language_v1beta2.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.analyze_entity_sentiment(document) - - Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1beta2.types.Document` - encoding_type (~google.cloud.language_v1beta2.enums.EncodingType): The encoding type used by the API to calculate offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1beta2.types.AnalyzeEntitySentimentResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "analyze_entity_sentiment" not in self._inner_api_calls: - self._inner_api_calls[ - "analyze_entity_sentiment" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.analyze_entity_sentiment, - default_retry=self._method_configs["AnalyzeEntitySentiment"].retry, - default_timeout=self._method_configs["AnalyzeEntitySentiment"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnalyzeEntitySentimentRequest( - document=document, encoding_type=encoding_type - ) - return self._inner_api_calls["analyze_entity_sentiment"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def analyze_syntax( - self, - document, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part-of-speech tags, dependency trees, and other - properties. 
- - Example: - >>> from google.cloud import language_v1beta2 - >>> - >>> client = language_v1beta2.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.analyze_syntax(document) - - Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1beta2.types.Document` - encoding_type (~google.cloud.language_v1beta2.enums.EncodingType): The encoding type used by the API to calculate offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1beta2.types.AnalyzeSyntaxResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "analyze_syntax" not in self._inner_api_calls: - self._inner_api_calls[ - "analyze_syntax" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.analyze_syntax, - default_retry=self._method_configs["AnalyzeSyntax"].retry, - default_timeout=self._method_configs["AnalyzeSyntax"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnalyzeSyntaxRequest( - document=document, encoding_type=encoding_type - ) - return self._inner_api_calls["analyze_syntax"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def classify_text( - self, - document, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Classifies a document into categories. - - Example: - >>> from google.cloud import language_v1beta2 - >>> - >>> client = language_v1beta2.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> response = client.classify_text(document) - - Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1beta2.types.Document` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1beta2.types.ClassifyTextResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "classify_text" not in self._inner_api_calls: - self._inner_api_calls[ - "classify_text" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.classify_text, - default_retry=self._method_configs["ClassifyText"].retry, - default_timeout=self._method_configs["ClassifyText"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.ClassifyTextRequest(document=document) - return self._inner_api_calls["classify_text"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def annotate_text( - self, - document, - features, - encoding_type=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - A convenience method that provides all syntax, sentiment, entity, and - classification features in one call. - - Example: - >>> from google.cloud import language_v1beta2 - >>> - >>> client = language_v1beta2.LanguageServiceClient() - >>> - >>> # TODO: Initialize `document`: - >>> document = {} - >>> - >>> # TODO: Initialize `features`: - >>> features = {} - >>> - >>> response = client.annotate_text(document, features) - - Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1beta2.types.Document` - features (Union[dict, ~google.cloud.language_v1beta2.types.Features]): Required. The enabled features. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.language_v1beta2.types.Features` - encoding_type (~google.cloud.language_v1beta2.enums.EncodingType): The encoding type used by the API to calculate offsets. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.language_v1beta2.types.AnnotateTextResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "annotate_text" not in self._inner_api_calls: - self._inner_api_calls[ - "annotate_text" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.annotate_text, - default_retry=self._method_configs["AnnotateText"].retry, - default_timeout=self._method_configs["AnnotateText"].timeout, - client_info=self._client_info, - ) - - request = language_service_pb2.AnnotateTextRequest( - document=document, features=features, encoding_type=encoding_type - ) - return self._inner_api_calls["annotate_text"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/language_v1beta2/gapic/language_service_client_config.py b/google/cloud/language_v1beta2/gapic/language_service_client_config.py deleted file mode 100644 index 5b11ec46..00000000 --- a/google/cloud/language_v1beta2/gapic/language_service_client_config.py +++ /dev/null @@ -1,53 +0,0 @@ -config = { - "interfaces": { - "google.cloud.language.v1beta2.LanguageService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "AnalyzeSentiment": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "AnalyzeEntities": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "AnalyzeEntitySentiment": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "AnalyzeSyntax": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ClassifyText": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "AnnotateText": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/google/cloud/language_v1beta2/gapic/transports/__init__.py b/google/cloud/language_v1beta2/gapic/transports/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/language_v1beta2/gapic/transports/language_service_grpc_transport.py b/google/cloud/language_v1beta2/gapic/transports/language_service_grpc_transport.py deleted file mode 100644 index 1fd3fba2..00000000 --- a/google/cloud/language_v1beta2/gapic/transports/language_service_grpc_transport.py +++ /dev/null @@ -1,197 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import google.api_core.grpc_helpers - -from google.cloud.language_v1beta2.proto import language_service_pb2_grpc - - -class LanguageServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.language.v1beta2 LanguageService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-language", - "https://www.googleapis.com/auth/cloud-platform", - ) - - def __init__( - self, channel=None, credentials=None, address="language.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "language_service_stub": language_service_pb2_grpc.LanguageServiceStub( - channel - ) - } - - @classmethod - def create_channel( - cls, address="language.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def analyze_sentiment(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.analyze_sentiment`. - - Analyzes the sentiment of the provided text. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnalyzeSentiment - - @property - def analyze_entities(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.analyze_entities`. 
- - Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnalyzeEntities - - @property - def analyze_entity_sentiment(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.analyze_entity_sentiment`. - - Finds entities, similar to ``AnalyzeEntities`` in the text and - analyzes sentiment associated with each entity and its mentions. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnalyzeEntitySentiment - - @property - def analyze_syntax(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.analyze_syntax`. - - Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part-of-speech tags, dependency trees, and other - properties. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnalyzeSyntax - - @property - def classify_text(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.classify_text`. - - Classifies a document into categories. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].ClassifyText - - @property - def annotate_text(self): - """Return the gRPC stub for :meth:`LanguageServiceClient.annotate_text`. - - A convenience method that provides all syntax, sentiment, entity, and - classification features in one call. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["language_service_stub"].AnnotateText diff --git a/google/cloud/language_v1beta2/proto/__init__.py b/google/cloud/language_v1beta2/proto/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/language_v1beta2/proto/language_service_pb2.py b/google/cloud/language_v1beta2/proto/language_service_pb2.py deleted file mode 100644 index ff31f8e6..00000000 --- a/google/cloud/language_v1beta2/proto/language_service_pb2.py +++ /dev/null @@ -1,4575 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
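A brief sketch of how the gRPC transport removed above could be driven directly, for example to hand the client a pre-built channel. Class and method names come from the deleted module; credential handling is left to application default credentials, and the document content is illustrative only.

from google.cloud import language_v1beta2
from google.cloud.language_v1beta2.gapic.transports import (
    language_service_grpc_transport,
)

# create_channel() attaches the cloud-language / cloud-platform scopes and
# falls back to application default credentials, mirroring what the
# transport constructor does when no channel is supplied.
channel = language_service_grpc_transport.LanguageServiceGrpcTransport.create_channel(
    address="language.googleapis.com:443"
)
transport = language_service_grpc_transport.LanguageServiceGrpcTransport(
    channel=channel
)

# The old client accepted a ready-made transport instance; each RPC is then
# dispatched through the stub properties defined above.
client = language_v1beta2.LanguageServiceClient(transport=transport)
document = {"type": 1, "content": "A long article about machine learning."}  # 1 == Document.Type.PLAIN_TEXT
response = client.classify_text(document)
for category in response.categories:
    print(category.name, category.confidence)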
-# source: google/cloud/language_v1beta2/proto/language_service.proto - -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/language_v1beta2/proto/language_service.proto", - package="google.cloud.language.v1beta2", - syntax="proto3", - serialized_options=b"\n!com.google.cloud.language.v1beta2B\024LanguageServiceProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;language", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n:google/cloud/language_v1beta2/proto/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"\x93\x04\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 \x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb9\x01\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\x12\x10\n\x0cPHONE_NUMBER\x10\t\x12\x0b\n\x07\x41\x44\x44RESS\x10\n\x12\x08\n\x04\x44\x41TE\x10\x0b\x12\n\n\x06NUMBER\x10\x0c\x12\t\n\x05PRICE\x10\r"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 
\x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x
05\x12\x0e\n\nPLUPERFECT\x10\x06"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03"\x9a\x08\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12\x42\n\x05label\x18\x02 \x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label"\xa9\x07\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\x12\x07\n\x03\x41SP\x10M\x12\x08\n\x04GMOD\x10N\x12\x08\n\x04GOBJ\x10O\x12\n\n\x06INFMOD\x10P\x12\x07\n\x03MES\x10Q\x12\t\n\x05NCOMP\x10R"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05":\n\x16\x43lassificationCategory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"\x9d\x01\n\x17\x41nalyzeSentimentRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 
\x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence"\xa3\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x9c\x01\n\x16\x41nalyzeEntitiesRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x9a\x01\n\x14\x41nalyzeSyntaxRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t"U\n\x13\x43lassifyTextRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02"a\n\x14\x43lassifyTextResponse\x12I\n\ncategories\x18\x01 \x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory"\x89\x03\n\x13\x41nnotateTextRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12R\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.FeaturesB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x99\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\x12\x15\n\rclassify_text\x18\x06 \x01(\x08"\xe4\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t\x12I\n\ncategories\x18\x06 
\x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x8a\x0b\n\x0fLanguageService\x12\xd7\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse"R\x82\xd3\xe4\x93\x02("#/v1beta2/documents:analyzeSentiment:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xd3\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse"Q\x82\xd3\xe4\x93\x02\'""/v1beta2/documents:analyzeEntities:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xef\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse"X\x82\xd3\xe4\x93\x02.")/v1beta2/documents:analyzeEntitySentiment:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xcb\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSyntaxRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse"O\x82\xd3\xe4\x93\x02%" /v1beta2/documents:analyzeSyntax:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xae\x01\n\x0c\x43lassifyText\x12\x32.google.cloud.language.v1beta2.ClassifyTextRequest\x1a\x33.google.cloud.language.v1beta2.ClassifyTextResponse"5\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:classifyText:\x01*\xda\x41\x08\x64ocument\x12\xd9\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse"`\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:annotateText:\x01*\xda\x41\x1f\x64ocument,features,encoding_type\xda\x41\x11\x64ocument,features\x1az\xca\x41\x17language.googleapis.com\xd2\x41]https://www.googleapis.com/auth/cloud-language,https://www.googleapis.com/auth/cloud-platformB\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - -_ENCODINGTYPE = _descriptor.EnumDescriptor( - name="EncodingType", - full_name="google.cloud.language.v1beta2.EncodingType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="NONE", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="UTF8", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="UTF16", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="UTF32", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=7035, - serialized_end=7091, -) -_sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) - -EncodingType = enum_type_wrapper.EnumTypeWrapper(_ENCODINGTYPE) -NONE = 0 
-UTF8 = 1 -UTF16 = 2 -UTF32 = 3 - - -_DOCUMENT_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.language.v1beta2.Document.Type", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PLAIN_TEXT", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="HTML", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=351, - serialized_end=405, -) -_sym_db.RegisterEnumDescriptor(_DOCUMENT_TYPE) - -_ENTITY_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.language.v1beta2.Entity.Type", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PERSON", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LOCATION", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ORGANIZATION", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="EVENT", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="WORK_OF_ART", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONSUMER_GOOD", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OTHER", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PHONE_NUMBER", - index=8, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADDRESS", - index=9, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DATE", - index=10, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUMBER", - index=11, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRICE", - index=12, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=892, - serialized_end=1077, -) -_sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) - -_PARTOFSPEECH_TAG = _descriptor.EnumDescriptor( - name="Tag", - 
full_name="google.cloud.language.v1beta2.PartOfSpeech.Tag", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADJ", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADP", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADV", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONJ", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DET", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOUN", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUM", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRON", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRT", - index=9, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PUNCT", - index=10, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VERB", - index=11, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="X", - index=12, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AFFIX", - index=13, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2169, - serialized_end=2310, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) - -_PARTOFSPEECH_ASPECT = _descriptor.EnumDescriptor( - name="Aspect", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Aspect", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="ASPECT_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PERFECTIVE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPERFECTIVE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PROGRESSIVE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - 
containing_type=None, - serialized_options=None, - serialized_start=2312, - serialized_end=2391, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) - -_PARTOFSPEECH_CASE = _descriptor.EnumDescriptor( - name="Case", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Case", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="CASE_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ACCUSATIVE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADVERBIAL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COMPLEMENTIVE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DATIVE", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GENITIVE", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INSTRUMENTAL", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LOCATIVE", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOMINATIVE", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OBLIQUE", - index=9, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PARTITIVE", - index=10, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PREPOSITIONAL", - index=11, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REFLEXIVE_CASE", - index=12, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RELATIVE_CASE", - index=13, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VOCATIVE", - index=14, - number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2394, - serialized_end=2642, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) - -_PARTOFSPEECH_FORM = _descriptor.EnumDescriptor( - name="Form", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Form", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="FORM_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - 
name="ADNOMIAL", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUXILIARY", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COMPLEMENTIZER", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FINAL_ENDING", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GERUND", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REALIS", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IRREALIS", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SHORT", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LONG", - index=9, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ORDER", - index=10, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SPECIFIC", - index=11, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2645, - serialized_end=2820, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) - -_PARTOFSPEECH_GENDER = _descriptor.EnumDescriptor( - name="Gender", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Gender", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="GENDER_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FEMININE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MASCULINE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NEUTER", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2822, - serialized_end=2891, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) - -_PARTOFSPEECH_MOOD = _descriptor.EnumDescriptor( - name="Mood", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Mood", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="MOOD_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONDITIONAL_MOOD", - index=1, - number=1, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPERATIVE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INDICATIVE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INTERROGATIVE", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="JUSSIVE", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SUBJUNCTIVE", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2893, - serialized_end=3020, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) - -_PARTOFSPEECH_NUMBER = _descriptor.EnumDescriptor( - name="Number", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Number", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="NUMBER_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SINGULAR", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PLURAL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DUAL", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3022, - serialized_end=3086, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) - -_PARTOFSPEECH_PERSON = _descriptor.EnumDescriptor( - name="Person", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Person", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="PERSON_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FIRST", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SECOND", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="THIRD", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REFLEXIVE_PERSON", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3088, - serialized_end=3172, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) - -_PARTOFSPEECH_PROPER = _descriptor.EnumDescriptor( - name="Proper", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Proper", - 
filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="PROPER_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PROPER", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOT_PROPER", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3174, - serialized_end=3230, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) - -_PARTOFSPEECH_RECIPROCITY = _descriptor.EnumDescriptor( - name="Reciprocity", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Reciprocity", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="RECIPROCITY_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RECIPROCAL", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NON_RECIPROCAL", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3232, - serialized_end=3306, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) - -_PARTOFSPEECH_TENSE = _descriptor.EnumDescriptor( - name="Tense", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Tense", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TENSE_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONDITIONAL_TENSE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FUTURE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PAST", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRESENT", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMPERFECT", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PLUPERFECT", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3308, - serialized_end=3423, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) - -_PARTOFSPEECH_VOICE = _descriptor.EnumDescriptor( - name="Voice", - full_name="google.cloud.language.v1beta2.PartOfSpeech.Voice", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - 
name="VOICE_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ACTIVE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CAUSATIVE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PASSIVE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3425, - serialized_end=3491, -) -_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) - -_DEPENDENCYEDGE_LABEL = _descriptor.EnumDescriptor( - name="Label", - full_name="google.cloud.language.v1beta2.DependencyEdge.Label", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ABBREV", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ACOMP", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADVCL", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADVMOD", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AMOD", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="APPOS", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ATTR", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUX", - index=8, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUXPASS", - index=9, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CC", - index=10, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CCOMP", - index=11, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CONJ", - index=12, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CSUBJ", - index=13, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CSUBJPASS", - index=14, - number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.EnumValueDescriptor( - name="DEP", - index=15, - number=15, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DET", - index=16, - number=16, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DISCOURSE", - index=17, - number=17, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DOBJ", - index=18, - number=18, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="EXPL", - index=19, - number=19, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GOESWITH", - index=20, - number=20, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IOBJ", - index=21, - number=21, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MARK", - index=22, - number=22, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MWE", - index=23, - number=23, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MWV", - index=24, - number=24, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NEG", - index=25, - number=25, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NN", - index=26, - number=26, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NPADVMOD", - index=27, - number=27, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NSUBJ", - index=28, - number=28, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NSUBJPASS", - index=29, - number=29, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUM", - index=30, - number=30, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUMBER", - index=31, - number=31, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="P", - index=32, - number=32, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PARATAXIS", - index=33, - number=33, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PARTMOD", - index=34, - number=34, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PCOMP", - index=35, - number=35, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.EnumValueDescriptor( - name="POBJ", - index=36, - number=36, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POSS", - index=37, - number=37, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POSTNEG", - index=38, - number=38, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRECOMP", - index=39, - number=39, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRECONJ", - index=40, - number=40, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PREDET", - index=41, - number=41, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PREF", - index=42, - number=42, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PREP", - index=43, - number=43, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRONL", - index=44, - number=44, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PRT", - index=45, - number=45, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PS", - index=46, - number=46, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="QUANTMOD", - index=47, - number=47, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RCMOD", - index=48, - number=48, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RCMODREL", - index=49, - number=49, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RDROP", - index=50, - number=50, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REF", - index=51, - number=51, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REMNANT", - index=52, - number=52, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REPARANDUM", - index=53, - number=53, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ROOT", - index=54, - number=54, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SNUM", - index=55, - number=55, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SUFF", - index=56, - number=56, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.EnumValueDescriptor( - name="TMOD", - index=57, - number=57, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TOPIC", - index=58, - number=58, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VMOD", - index=59, - number=59, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VOCATIVE", - index=60, - number=60, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="XCOMP", - index=61, - number=61, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SUFFIX", - index=62, - number=62, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TITLE", - index=63, - number=63, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ADVPHMOD", - index=64, - number=64, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUXCAUS", - index=65, - number=65, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUXVV", - index=66, - number=66, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DTMOD", - index=67, - number=67, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FOREIGN", - index=68, - number=68, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="KW", - index=69, - number=69, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LIST", - index=70, - number=70, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOMC", - index=71, - number=71, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOMCSUBJ", - index=72, - number=72, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NOMCSUBJPASS", - index=73, - number=73, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NUMC", - index=74, - number=74, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COP", - index=75, - number=75, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DISLOCATED", - index=76, - number=76, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ASP", - index=77, - number=77, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.EnumValueDescriptor( - name="GMOD", - index=78, - number=78, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GOBJ", - index=79, - number=79, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INFMOD", - index=80, - number=80, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MES", - index=81, - number=81, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NCOMP", - index=82, - number=82, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3607, - serialized_end=4544, -) -_sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) - -_ENTITYMENTION_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.language.v1beta2.EntityMention.Type", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNKNOWN", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PROPER", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COMMON", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4745, - serialized_end=4793, -) -_sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) - - -_DOCUMENT = _descriptor.Descriptor( - name="Document", - full_name="google.cloud.language.v1beta2.Document", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.language.v1beta2.Document.type", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="content", - full_name="google.cloud.language.v1beta2.Document.content", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="gcs_content_uri", - full_name="google.cloud.language.v1beta2.Document.gcs_content_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1beta2.Document.language", - 
index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DOCUMENT_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="source", - full_name="google.cloud.language.v1beta2.Document.source", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ) - ], - serialized_start=215, - serialized_end=415, -) - - -_SENTENCE = _descriptor.Descriptor( - name="Sentence", - full_name="google.cloud.language.v1beta2.Sentence", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text", - full_name="google.cloud.language.v1beta2.Sentence.text", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentiment", - full_name="google.cloud.language.v1beta2.Sentence.sentiment", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=417, - serialized_end=543, -) - - -_ENTITY_METADATAENTRY = _descriptor.Descriptor( - name="MetadataEntry", - full_name="google.cloud.language.v1beta2.Entity.MetadataEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.language.v1beta2.Entity.MetadataEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.language.v1beta2.Entity.MetadataEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=842, - serialized_end=889, -) - -_ENTITY = _descriptor.Descriptor( - name="Entity", - full_name="google.cloud.language.v1beta2.Entity", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.language.v1beta2.Entity.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.language.v1beta2.Entity.type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.cloud.language.v1beta2.Entity.metadata", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="salience", - full_name="google.cloud.language.v1beta2.Entity.salience", - index=3, - number=4, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="mentions", - full_name="google.cloud.language.v1beta2.Entity.mentions", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentiment", - full_name="google.cloud.language.v1beta2.Entity.sentiment", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_ENTITY_METADATAENTRY], - enum_types=[_ENTITY_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=546, - serialized_end=1077, -) - - -_TOKEN = _descriptor.Descriptor( - name="Token", - full_name="google.cloud.language.v1beta2.Token", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text", - full_name="google.cloud.language.v1beta2.Token.text", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="part_of_speech", - full_name="google.cloud.language.v1beta2.Token.part_of_speech", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dependency_edge", - full_name="google.cloud.language.v1beta2.Token.dependency_edge", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="lemma", - full_name="google.cloud.language.v1beta2.Token.lemma", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1080, - serialized_end=1298, -) - - -_SENTIMENT = _descriptor.Descriptor( - name="Sentiment", - full_name="google.cloud.language.v1beta2.Sentiment", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="magnitude", - full_name="google.cloud.language.v1beta2.Sentiment.magnitude", - index=0, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="score", - full_name="google.cloud.language.v1beta2.Sentiment.score", - index=1, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1300, - serialized_end=1345, -) - - -_PARTOFSPEECH = _descriptor.Descriptor( - name="PartOfSpeech", - full_name="google.cloud.language.v1beta2.PartOfSpeech", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="tag", - full_name="google.cloud.language.v1beta2.PartOfSpeech.tag", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="aspect", - 
full_name="google.cloud.language.v1beta2.PartOfSpeech.aspect", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="case", - full_name="google.cloud.language.v1beta2.PartOfSpeech.case", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="form", - full_name="google.cloud.language.v1beta2.PartOfSpeech.form", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="gender", - full_name="google.cloud.language.v1beta2.PartOfSpeech.gender", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="mood", - full_name="google.cloud.language.v1beta2.PartOfSpeech.mood", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="number", - full_name="google.cloud.language.v1beta2.PartOfSpeech.number", - index=6, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="person", - full_name="google.cloud.language.v1beta2.PartOfSpeech.person", - index=7, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="proper", - full_name="google.cloud.language.v1beta2.PartOfSpeech.proper", - index=8, - number=9, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="reciprocity", - full_name="google.cloud.language.v1beta2.PartOfSpeech.reciprocity", - index=9, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - 
default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="tense", - full_name="google.cloud.language.v1beta2.PartOfSpeech.tense", - index=10, - number=11, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="voice", - full_name="google.cloud.language.v1beta2.PartOfSpeech.voice", - index=11, - number=12, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[ - _PARTOFSPEECH_TAG, - _PARTOFSPEECH_ASPECT, - _PARTOFSPEECH_CASE, - _PARTOFSPEECH_FORM, - _PARTOFSPEECH_GENDER, - _PARTOFSPEECH_MOOD, - _PARTOFSPEECH_NUMBER, - _PARTOFSPEECH_PERSON, - _PARTOFSPEECH_PROPER, - _PARTOFSPEECH_RECIPROCITY, - _PARTOFSPEECH_TENSE, - _PARTOFSPEECH_VOICE, - ], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1348, - serialized_end=3491, -) - - -_DEPENDENCYEDGE = _descriptor.Descriptor( - name="DependencyEdge", - full_name="google.cloud.language.v1beta2.DependencyEdge", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="head_token_index", - full_name="google.cloud.language.v1beta2.DependencyEdge.head_token_index", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="label", - full_name="google.cloud.language.v1beta2.DependencyEdge.label", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DEPENDENCYEDGE_LABEL], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3494, - serialized_end=4544, -) - - -_ENTITYMENTION = _descriptor.Descriptor( - name="EntityMention", - full_name="google.cloud.language.v1beta2.EntityMention", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text", - full_name="google.cloud.language.v1beta2.EntityMention.text", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.language.v1beta2.EntityMention.type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentiment", - full_name="google.cloud.language.v1beta2.EntityMention.sentiment", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_ENTITYMENTION_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4547, - serialized_end=4793, -) - - -_TEXTSPAN = _descriptor.Descriptor( - name="TextSpan", - full_name="google.cloud.language.v1beta2.TextSpan", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="content", - full_name="google.cloud.language.v1beta2.TextSpan.content", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="begin_offset", - full_name="google.cloud.language.v1beta2.TextSpan.begin_offset", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4795, - serialized_end=4844, -) - - -_CLASSIFICATIONCATEGORY = _descriptor.Descriptor( - name="ClassificationCategory", - full_name="google.cloud.language.v1beta2.ClassificationCategory", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.language.v1beta2.ClassificationCategory.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="confidence", - full_name="google.cloud.language.v1beta2.ClassificationCategory.confidence", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4846, - serialized_end=4904, -) - - -_ANALYZESENTIMENTREQUEST = _descriptor.Descriptor( - name="AnalyzeSentimentRequest", - full_name="google.cloud.language.v1beta2.AnalyzeSentimentRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1beta2.AnalyzeSentimentRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1beta2.AnalyzeSentimentRequest.encoding_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4907, - serialized_end=5064, -) - - -_ANALYZESENTIMENTRESPONSE = _descriptor.Descriptor( - name="AnalyzeSentimentResponse", - full_name="google.cloud.language.v1beta2.AnalyzeSentimentResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document_sentiment", - full_name="google.cloud.language.v1beta2.AnalyzeSentimentResponse.document_sentiment", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1beta2.AnalyzeSentimentResponse.language", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentences", - full_name="google.cloud.language.v1beta2.AnalyzeSentimentResponse.sentences", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5067, - 
serialized_end=5241, -) - - -_ANALYZEENTITYSENTIMENTREQUEST = _descriptor.Descriptor( - name="AnalyzeEntitySentimentRequest", - full_name="google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest.encoding_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5244, - serialized_end=5407, -) - - -_ANALYZEENTITYSENTIMENTRESPONSE = _descriptor.Descriptor( - name="AnalyzeEntitySentimentResponse", - full_name="google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="entities", - full_name="google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse.entities", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse.language", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5409, - serialized_end=5516, -) - - -_ANALYZEENTITIESREQUEST = _descriptor.Descriptor( - name="AnalyzeEntitiesRequest", - full_name="google.cloud.language.v1beta2.AnalyzeEntitiesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1beta2.AnalyzeEntitiesRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1beta2.AnalyzeEntitiesRequest.encoding_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5519, - serialized_end=5675, -) - - -_ANALYZEENTITIESRESPONSE = _descriptor.Descriptor( - name="AnalyzeEntitiesResponse", - full_name="google.cloud.language.v1beta2.AnalyzeEntitiesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="entities", - full_name="google.cloud.language.v1beta2.AnalyzeEntitiesResponse.entities", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1beta2.AnalyzeEntitiesResponse.language", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5677, - serialized_end=5777, -) - - -_ANALYZESYNTAXREQUEST = _descriptor.Descriptor( - name="AnalyzeSyntaxRequest", - full_name="google.cloud.language.v1beta2.AnalyzeSyntaxRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1beta2.AnalyzeSyntaxRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1beta2.AnalyzeSyntaxRequest.encoding_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5780, - serialized_end=5934, -) - - -_ANALYZESYNTAXRESPONSE = _descriptor.Descriptor( - 
name="AnalyzeSyntaxResponse", - full_name="google.cloud.language.v1beta2.AnalyzeSyntaxResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="sentences", - full_name="google.cloud.language.v1beta2.AnalyzeSyntaxResponse.sentences", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="tokens", - full_name="google.cloud.language.v1beta2.AnalyzeSyntaxResponse.tokens", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1beta2.AnalyzeSyntaxResponse.language", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5937, - serialized_end=6092, -) - - -_CLASSIFYTEXTREQUEST = _descriptor.Descriptor( - name="ClassifyTextRequest", - full_name="google.cloud.language.v1beta2.ClassifyTextRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1beta2.ClassifyTextRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6094, - serialized_end=6179, -) - - -_CLASSIFYTEXTRESPONSE = _descriptor.Descriptor( - name="ClassifyTextResponse", - full_name="google.cloud.language.v1beta2.ClassifyTextResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="categories", - full_name="google.cloud.language.v1beta2.ClassifyTextResponse.categories", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - 
oneofs=[], - serialized_start=6181, - serialized_end=6278, -) - - -_ANNOTATETEXTREQUEST_FEATURES = _descriptor.Descriptor( - name="Features", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.Features", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="extract_syntax", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="extract_entities", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="extract_document_sentiment", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="extract_entity_sentiment", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="classify_text", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.Features.classify_text", - index=4, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6521, - serialized_end=6674, -) - -_ANNOTATETEXTREQUEST = _descriptor.Descriptor( - name="AnnotateTextRequest", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="document", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.document", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="features", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.features", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="encoding_type", - full_name="google.cloud.language.v1beta2.AnnotateTextRequest.encoding_type", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_ANNOTATETEXTREQUEST_FEATURES], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6281, - serialized_end=6674, -) - - -_ANNOTATETEXTRESPONSE = _descriptor.Descriptor( - name="AnnotateTextResponse", - full_name="google.cloud.language.v1beta2.AnnotateTextResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="sentences", - full_name="google.cloud.language.v1beta2.AnnotateTextResponse.sentences", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="tokens", - full_name="google.cloud.language.v1beta2.AnnotateTextResponse.tokens", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="entities", - full_name="google.cloud.language.v1beta2.AnnotateTextResponse.entities", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="document_sentiment", - full_name="google.cloud.language.v1beta2.AnnotateTextResponse.document_sentiment", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language", - full_name="google.cloud.language.v1beta2.AnnotateTextResponse.language", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="categories", - full_name="google.cloud.language.v1beta2.AnnotateTextResponse.categories", - index=5, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6677, - serialized_end=7033, -) - -_DOCUMENT.fields_by_name["type"].enum_type = _DOCUMENT_TYPE -_DOCUMENT_TYPE.containing_type = _DOCUMENT -_DOCUMENT.oneofs_by_name["source"].fields.append(_DOCUMENT.fields_by_name["content"]) -_DOCUMENT.fields_by_name["content"].containing_oneof = _DOCUMENT.oneofs_by_name[ - "source" -] -_DOCUMENT.oneofs_by_name["source"].fields.append( - _DOCUMENT.fields_by_name["gcs_content_uri"] -) -_DOCUMENT.fields_by_name["gcs_content_uri"].containing_oneof = _DOCUMENT.oneofs_by_name[ - "source" -] -_SENTENCE.fields_by_name["text"].message_type = _TEXTSPAN -_SENTENCE.fields_by_name["sentiment"].message_type = _SENTIMENT -_ENTITY_METADATAENTRY.containing_type = _ENTITY -_ENTITY.fields_by_name["type"].enum_type = _ENTITY_TYPE -_ENTITY.fields_by_name["metadata"].message_type = _ENTITY_METADATAENTRY -_ENTITY.fields_by_name["mentions"].message_type = _ENTITYMENTION -_ENTITY.fields_by_name["sentiment"].message_type = _SENTIMENT -_ENTITY_TYPE.containing_type = _ENTITY -_TOKEN.fields_by_name["text"].message_type = _TEXTSPAN -_TOKEN.fields_by_name["part_of_speech"].message_type = _PARTOFSPEECH -_TOKEN.fields_by_name["dependency_edge"].message_type = _DEPENDENCYEDGE -_PARTOFSPEECH.fields_by_name["tag"].enum_type = _PARTOFSPEECH_TAG -_PARTOFSPEECH.fields_by_name["aspect"].enum_type = _PARTOFSPEECH_ASPECT -_PARTOFSPEECH.fields_by_name["case"].enum_type = _PARTOFSPEECH_CASE -_PARTOFSPEECH.fields_by_name["form"].enum_type = _PARTOFSPEECH_FORM -_PARTOFSPEECH.fields_by_name["gender"].enum_type = _PARTOFSPEECH_GENDER -_PARTOFSPEECH.fields_by_name["mood"].enum_type = _PARTOFSPEECH_MOOD -_PARTOFSPEECH.fields_by_name["number"].enum_type = _PARTOFSPEECH_NUMBER -_PARTOFSPEECH.fields_by_name["person"].enum_type = _PARTOFSPEECH_PERSON -_PARTOFSPEECH.fields_by_name["proper"].enum_type = _PARTOFSPEECH_PROPER -_PARTOFSPEECH.fields_by_name["reciprocity"].enum_type = _PARTOFSPEECH_RECIPROCITY -_PARTOFSPEECH.fields_by_name["tense"].enum_type = _PARTOFSPEECH_TENSE -_PARTOFSPEECH.fields_by_name["voice"].enum_type = _PARTOFSPEECH_VOICE -_PARTOFSPEECH_TAG.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_ASPECT.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_CASE.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_FORM.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_GENDER.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_MOOD.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_NUMBER.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_PERSON.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_PROPER.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_RECIPROCITY.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_TENSE.containing_type = _PARTOFSPEECH -_PARTOFSPEECH_VOICE.containing_type = _PARTOFSPEECH -_DEPENDENCYEDGE.fields_by_name["label"].enum_type = _DEPENDENCYEDGE_LABEL 
-_DEPENDENCYEDGE_LABEL.containing_type = _DEPENDENCYEDGE -_ENTITYMENTION.fields_by_name["text"].message_type = _TEXTSPAN -_ENTITYMENTION.fields_by_name["type"].enum_type = _ENTITYMENTION_TYPE -_ENTITYMENTION.fields_by_name["sentiment"].message_type = _SENTIMENT -_ENTITYMENTION_TYPE.containing_type = _ENTITYMENTION -_ANALYZESENTIMENTREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANALYZESENTIMENTREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANALYZESENTIMENTRESPONSE.fields_by_name["document_sentiment"].message_type = _SENTIMENT -_ANALYZESENTIMENTRESPONSE.fields_by_name["sentences"].message_type = _SENTENCE -_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANALYZEENTITYSENTIMENTRESPONSE.fields_by_name["entities"].message_type = _ENTITY -_ANALYZEENTITIESREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANALYZEENTITIESREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANALYZEENTITIESRESPONSE.fields_by_name["entities"].message_type = _ENTITY -_ANALYZESYNTAXREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANALYZESYNTAXREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANALYZESYNTAXRESPONSE.fields_by_name["sentences"].message_type = _SENTENCE -_ANALYZESYNTAXRESPONSE.fields_by_name["tokens"].message_type = _TOKEN -_CLASSIFYTEXTREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_CLASSIFYTEXTRESPONSE.fields_by_name[ - "categories" -].message_type = _CLASSIFICATIONCATEGORY -_ANNOTATETEXTREQUEST_FEATURES.containing_type = _ANNOTATETEXTREQUEST -_ANNOTATETEXTREQUEST.fields_by_name["document"].message_type = _DOCUMENT -_ANNOTATETEXTREQUEST.fields_by_name[ - "features" -].message_type = _ANNOTATETEXTREQUEST_FEATURES -_ANNOTATETEXTREQUEST.fields_by_name["encoding_type"].enum_type = _ENCODINGTYPE -_ANNOTATETEXTRESPONSE.fields_by_name["sentences"].message_type = _SENTENCE -_ANNOTATETEXTRESPONSE.fields_by_name["tokens"].message_type = _TOKEN -_ANNOTATETEXTRESPONSE.fields_by_name["entities"].message_type = _ENTITY -_ANNOTATETEXTRESPONSE.fields_by_name["document_sentiment"].message_type = _SENTIMENT -_ANNOTATETEXTRESPONSE.fields_by_name[ - "categories" -].message_type = _CLASSIFICATIONCATEGORY -DESCRIPTOR.message_types_by_name["Document"] = _DOCUMENT -DESCRIPTOR.message_types_by_name["Sentence"] = _SENTENCE -DESCRIPTOR.message_types_by_name["Entity"] = _ENTITY -DESCRIPTOR.message_types_by_name["Token"] = _TOKEN -DESCRIPTOR.message_types_by_name["Sentiment"] = _SENTIMENT -DESCRIPTOR.message_types_by_name["PartOfSpeech"] = _PARTOFSPEECH -DESCRIPTOR.message_types_by_name["DependencyEdge"] = _DEPENDENCYEDGE -DESCRIPTOR.message_types_by_name["EntityMention"] = _ENTITYMENTION -DESCRIPTOR.message_types_by_name["TextSpan"] = _TEXTSPAN -DESCRIPTOR.message_types_by_name["ClassificationCategory"] = _CLASSIFICATIONCATEGORY -DESCRIPTOR.message_types_by_name["AnalyzeSentimentRequest"] = _ANALYZESENTIMENTREQUEST -DESCRIPTOR.message_types_by_name["AnalyzeSentimentResponse"] = _ANALYZESENTIMENTRESPONSE -DESCRIPTOR.message_types_by_name[ - "AnalyzeEntitySentimentRequest" -] = _ANALYZEENTITYSENTIMENTREQUEST -DESCRIPTOR.message_types_by_name[ - "AnalyzeEntitySentimentResponse" -] = _ANALYZEENTITYSENTIMENTRESPONSE -DESCRIPTOR.message_types_by_name["AnalyzeEntitiesRequest"] = _ANALYZEENTITIESREQUEST -DESCRIPTOR.message_types_by_name["AnalyzeEntitiesResponse"] = _ANALYZEENTITIESRESPONSE 
-DESCRIPTOR.message_types_by_name["AnalyzeSyntaxRequest"] = _ANALYZESYNTAXREQUEST -DESCRIPTOR.message_types_by_name["AnalyzeSyntaxResponse"] = _ANALYZESYNTAXRESPONSE -DESCRIPTOR.message_types_by_name["ClassifyTextRequest"] = _CLASSIFYTEXTREQUEST -DESCRIPTOR.message_types_by_name["ClassifyTextResponse"] = _CLASSIFYTEXTRESPONSE -DESCRIPTOR.message_types_by_name["AnnotateTextRequest"] = _ANNOTATETEXTREQUEST -DESCRIPTOR.message_types_by_name["AnnotateTextResponse"] = _ANNOTATETEXTRESPONSE -DESCRIPTOR.enum_types_by_name["EncodingType"] = _ENCODINGTYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Document = _reflection.GeneratedProtocolMessageType( - "Document", - (_message.Message,), - { - "DESCRIPTOR": _DOCUMENT, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """################################################################ # - Represents the input to API methods. - - Attributes: - type: - Required. If the type is not set or is ``TYPE_UNSPECIFIED``, - returns an ``INVALID_ARGUMENT`` error. - source: - The source of the document: a string containing the content or - a Google Cloud Storage URI. - content: - The content of the input in string format. Cloud audit logging - exempt since it is based on user data. - gcs_content_uri: - The Google Cloud Storage URI where the file content is - located. This URI must be of the form: - gs://bucket_name/object_name. For more details, see - https://cloud.google.com/storage/docs/reference-uris. NOTE: - Cloud Storage object versioning is not supported. - language: - The language of the document (if not specified, the language - is automatically detected). Both ISO and BCP-47 language codes - are accepted. `Language Support - `__ - lists currently supported languages for each API method. If - the language (either specified by the caller or automatically - detected) is not supported by the called API method, an - ``INVALID_ARGUMENT`` error is returned. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Document) - }, -) -_sym_db.RegisterMessage(Document) - -Sentence = _reflection.GeneratedProtocolMessageType( - "Sentence", - (_message.Message,), - { - "DESCRIPTOR": _SENTENCE, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents a sentence in the input document. - - Attributes: - text: - The sentence text. - sentiment: - For calls to [AnalyzeSentiment][] or if [AnnotateTextRequest.F - eatures.extract_document_sentiment][google.cloud.language.v1be - ta2.AnnotateTextRequest.Features.extract_document_sentiment] - is set to true, this field will contain the sentiment for the - sentence. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Sentence) - }, -) -_sym_db.RegisterMessage(Sentence) - -Entity = _reflection.GeneratedProtocolMessageType( - "Entity", - (_message.Message,), - { - "MetadataEntry": _reflection.GeneratedProtocolMessageType( - "MetadataEntry", - (_message.Message,), - { - "DESCRIPTOR": _ENTITY_METADATAENTRY, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Entity.MetadataEntry) - }, - ), - "DESCRIPTOR": _ENTITY, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents a phrase in the text that is a known entity, such as a - person, an organization, or location. The API associates information, - such as salience and mentions, with entities. 
- - Attributes: - name: - The representative name for the entity. - type: - The entity type. - metadata: - Metadata associated with the entity. For most entity types, - the metadata is a Wikipedia URL (``wikipedia_url``) and - Knowledge Graph MID (``mid``), if they are available. For the - metadata associated with other entity types, see the Type - table below. - salience: - The salience score associated with the entity in the [0, 1.0] - range. The salience score for an entity provides information - about the importance or centrality of that entity to the - entire document text. Scores closer to 0 are less salient, - while scores closer to 1.0 are highly salient. - mentions: - The mentions of this entity in the input document. The API - currently supports proper noun mentions. - sentiment: - For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq - uest.Features.extract_entity_sentiment][google.cloud.language. - v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the aggregate - sentiment expressed for this entity in the provided document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Entity) - }, -) -_sym_db.RegisterMessage(Entity) -_sym_db.RegisterMessage(Entity.MetadataEntry) - -Token = _reflection.GeneratedProtocolMessageType( - "Token", - (_message.Message,), - { - "DESCRIPTOR": _TOKEN, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents the smallest syntactic building block of the text. - - Attributes: - text: - The token text. - part_of_speech: - Parts of speech tag for this token. - dependency_edge: - Dependency tree parse for this token. - lemma: - \ `Lemma - `__ of - the token. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Token) - }, -) -_sym_db.RegisterMessage(Token) - -Sentiment = _reflection.GeneratedProtocolMessageType( - "Sentiment", - (_message.Message,), - { - "DESCRIPTOR": _SENTIMENT, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents the feeling associated with the entire text or entities in - the text. Next ID: 6 - - Attributes: - magnitude: - A non-negative number in the [0, +inf) range, which represents - the absolute magnitude of sentiment regardless of score - (positive or negative). - score: - Sentiment score between -1.0 (negative sentiment) and 1.0 - (positive sentiment). - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Sentiment) - }, -) -_sym_db.RegisterMessage(Sentiment) - -PartOfSpeech = _reflection.GeneratedProtocolMessageType( - "PartOfSpeech", - (_message.Message,), - { - "DESCRIPTOR": _PARTOFSPEECH, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents part of speech information for a token. - - Attributes: - tag: - The part of speech tag. - aspect: - The grammatical aspect. - case: - The grammatical case. - form: - The grammatical form. - gender: - The grammatical gender. - mood: - The grammatical mood. - number: - The grammatical number. - person: - The grammatical person. - proper: - The grammatical properness. - reciprocity: - The grammatical reciprocity. - tense: - The grammatical tense. - voice: - The grammatical voice. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.PartOfSpeech) - }, -) -_sym_db.RegisterMessage(PartOfSpeech) - -DependencyEdge = _reflection.GeneratedProtocolMessageType( - "DependencyEdge", - (_message.Message,), - { - "DESCRIPTOR": _DEPENDENCYEDGE, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents dependency parse tree information for a token. - - Attributes: - head_token_index: - Represents the head of this token in the dependency tree. This - is the index of the token which has an arc going to this - token. The index is the position of the token in the array of - tokens returned by the API method. If this token is a root - token, then the ``head_token_index`` is its own index. - label: - The parse label for the token. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.DependencyEdge) - }, -) -_sym_db.RegisterMessage(DependencyEdge) - -EntityMention = _reflection.GeneratedProtocolMessageType( - "EntityMention", - (_message.Message,), - { - "DESCRIPTOR": _ENTITYMENTION, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents a mention for an entity in the text. Currently, proper noun - mentions are supported. - - Attributes: - text: - The mention text. - type: - The type of the entity mention. - sentiment: - For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq - uest.Features.extract_entity_sentiment][google.cloud.language. - v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - is set to true, this field will contain the sentiment - expressed for this mention of the entity in the provided - document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.EntityMention) - }, -) -_sym_db.RegisterMessage(EntityMention) - -TextSpan = _reflection.GeneratedProtocolMessageType( - "TextSpan", - (_message.Message,), - { - "DESCRIPTOR": _TEXTSPAN, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents an output piece of text. - - Attributes: - content: - The content of the output text. - begin_offset: - The API calculates the beginning offset of the content in the - original document according to the - [EncodingType][google.cloud.language.v1beta2.EncodingType] - specified in the API request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.TextSpan) - }, -) -_sym_db.RegisterMessage(TextSpan) - -ClassificationCategory = _reflection.GeneratedProtocolMessageType( - "ClassificationCategory", - (_message.Message,), - { - "DESCRIPTOR": _CLASSIFICATIONCATEGORY, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """Represents a category returned from the text classifier. - - Attributes: - name: - The name of the category representing the document, from the - `predefined taxonomy `__. - confidence: - The classifier’s confidence of the category. Number represents - how certain the classifier is that this category represents - the given text. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.ClassificationCategory) - }, -) -_sym_db.RegisterMessage(ClassificationCategory) - -AnalyzeSentimentRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzeSentimentRequest", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZESENTIMENTREQUEST, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The sentiment analysis request message. 
- - Attributes: - document: - Required. Input document. - encoding_type: - The encoding type used by the API to calculate sentence - offsets for the sentence sentiment. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSentimentRequest) - }, -) -_sym_db.RegisterMessage(AnalyzeSentimentRequest) - -AnalyzeSentimentResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzeSentimentResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZESENTIMENTRESPONSE, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The sentiment analysis response message. - - Attributes: - document_sentiment: - The overall sentiment of the input document. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1beta2.Document.language] field for more - details. - sentences: - The sentiment for all the sentences in the document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSentimentResponse) - }, -) -_sym_db.RegisterMessage(AnalyzeSentimentResponse) - -AnalyzeEntitySentimentRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzeEntitySentimentRequest", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEENTITYSENTIMENTREQUEST, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The entity-level sentiment analysis request message. - - Attributes: - document: - Required. Input document. - encoding_type: - The encoding type used by the API to calculate offsets. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest) - }, -) -_sym_db.RegisterMessage(AnalyzeEntitySentimentRequest) - -AnalyzeEntitySentimentResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzeEntitySentimentResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEENTITYSENTIMENTRESPONSE, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The entity-level sentiment analysis response message. - - Attributes: - entities: - The recognized entities in the input document with associated - sentiments. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1beta2.Document.language] field for more - details. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse) - }, -) -_sym_db.RegisterMessage(AnalyzeEntitySentimentResponse) - -AnalyzeEntitiesRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzeEntitiesRequest", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEENTITIESREQUEST, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The entity analysis request message. - - Attributes: - document: - Required. Input document. - encoding_type: - The encoding type used by the API to calculate offsets. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitiesRequest) - }, -) -_sym_db.RegisterMessage(AnalyzeEntitiesRequest) - -AnalyzeEntitiesResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzeEntitiesResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEENTITIESRESPONSE, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The entity analysis response message. - - Attributes: - entities: - The recognized entities in the input document. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1beta2.Document.language] field for more - details. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitiesResponse) - }, -) -_sym_db.RegisterMessage(AnalyzeEntitiesResponse) - -AnalyzeSyntaxRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzeSyntaxRequest", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZESYNTAXREQUEST, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The syntax analysis request message. - - Attributes: - document: - Required. Input document. - encoding_type: - The encoding type used by the API to calculate offsets. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSyntaxRequest) - }, -) -_sym_db.RegisterMessage(AnalyzeSyntaxRequest) - -AnalyzeSyntaxResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzeSyntaxResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZESYNTAXRESPONSE, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The syntax analysis response message. - - Attributes: - sentences: - Sentences in the input document. - tokens: - Tokens, along with their syntactic information, in the input - document. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1beta2.Document.language] field for more - details. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSyntaxResponse) - }, -) -_sym_db.RegisterMessage(AnalyzeSyntaxResponse) - -ClassifyTextRequest = _reflection.GeneratedProtocolMessageType( - "ClassifyTextRequest", - (_message.Message,), - { - "DESCRIPTOR": _CLASSIFYTEXTREQUEST, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The document classification request message. - - Attributes: - document: - Required. Input document. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.ClassifyTextRequest) - }, -) -_sym_db.RegisterMessage(ClassifyTextRequest) - -ClassifyTextResponse = _reflection.GeneratedProtocolMessageType( - "ClassifyTextResponse", - (_message.Message,), - { - "DESCRIPTOR": _CLASSIFYTEXTRESPONSE, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The document classification response message. - - Attributes: - categories: - Categories representing the input document. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.ClassifyTextResponse) - }, -) -_sym_db.RegisterMessage(ClassifyTextResponse) - -AnnotateTextRequest = _reflection.GeneratedProtocolMessageType( - "AnnotateTextRequest", - (_message.Message,), - { - "Features": _reflection.GeneratedProtocolMessageType( - "Features", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATETEXTREQUEST_FEATURES, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """All available features for sentiment, syntax, and semantic analysis. - Setting each one to true will enable that specific analysis for the - input. Next ID: 10 - - Attributes: - extract_syntax: - Extract syntax information. - extract_entities: - Extract entities. - extract_document_sentiment: - Extract document-level sentiment. - extract_entity_sentiment: - Extract entities and their associated sentiment. - classify_text: - Classify the full document into categories. If this is true, - the API will use the default model which classifies into a - `predefined taxonomy `__. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextRequest.Features) - }, - ), - "DESCRIPTOR": _ANNOTATETEXTREQUEST, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The request message for the text annotation API, which can perform - multiple analysis types (sentiment, entities, and syntax) in one call. - - Attributes: - document: - Required. Input document. - features: - Required. The enabled features. - encoding_type: - The encoding type used by the API to calculate offsets. - """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextRequest) - }, -) -_sym_db.RegisterMessage(AnnotateTextRequest) -_sym_db.RegisterMessage(AnnotateTextRequest.Features) - -AnnotateTextResponse = _reflection.GeneratedProtocolMessageType( - "AnnotateTextResponse", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATETEXTRESPONSE, - "__module__": "google.cloud.language_v1beta2.proto.language_service_pb2", - "__doc__": """The text annotations response message. - - Attributes: - sentences: - Sentences in the input document. Populated if the user enables - [AnnotateTextRequest.Features.extract_syntax][google.cloud.lan - guage.v1beta2.AnnotateTextRequest.Features.extract_syntax]. - tokens: - Tokens, along with their syntactic information, in the input - document. Populated if the user enables [AnnotateTextRequest.F - eatures.extract_syntax][google.cloud.language.v1beta2.Annotate - TextRequest.Features.extract_syntax]. - entities: - Entities, along with their semantic information, in the input - document. Populated if the user enables [AnnotateTextRequest.F - eatures.extract_entities][google.cloud.language.v1beta2.Annota - teTextRequest.Features.extract_entities]. - document_sentiment: - The overall sentiment for the document. Populated if the user - enables [AnnotateTextRequest.Features.extract_document_sentime - nt][google.cloud.language.v1beta2.AnnotateTextRequest.Features - .extract_document_sentiment]. - language: - The language of the text, which will be the same as the - language specified in the request or, if not specified, the - automatically-detected language. See [Document.language][googl - e.cloud.language.v1beta2.Document.language] field for more - details. - categories: - Categories identified in the input document. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextResponse) - }, -) -_sym_db.RegisterMessage(AnnotateTextResponse) - - -DESCRIPTOR._options = None -_ENTITY_METADATAENTRY._options = None -_ANALYZESENTIMENTREQUEST.fields_by_name["document"]._options = None -_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name["document"]._options = None -_ANALYZEENTITIESREQUEST.fields_by_name["document"]._options = None -_ANALYZESYNTAXREQUEST.fields_by_name["document"]._options = None -_CLASSIFYTEXTREQUEST.fields_by_name["document"]._options = None -_ANNOTATETEXTREQUEST.fields_by_name["document"]._options = None -_ANNOTATETEXTREQUEST.fields_by_name["features"]._options = None - -_LANGUAGESERVICE = _descriptor.ServiceDescriptor( - name="LanguageService", - full_name="google.cloud.language.v1beta2.LanguageService", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\027language.googleapis.com\322A]https://www.googleapis.com/auth/cloud-language,https://www.googleapis.com/auth/cloud-platform", - create_key=_descriptor._internal_create_key, - serialized_start=7094, - serialized_end=8512, - methods=[ - _descriptor.MethodDescriptor( - name="AnalyzeSentiment", - full_name="google.cloud.language.v1beta2.LanguageService.AnalyzeSentiment", - index=0, - containing_service=None, - input_type=_ANALYZESENTIMENTREQUEST, - output_type=_ANALYZESENTIMENTRESPONSE, - serialized_options=b'\202\323\344\223\002("#/v1beta2/documents:analyzeSentiment:\001*\332A\026document,encoding_type\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AnalyzeEntities", - full_name="google.cloud.language.v1beta2.LanguageService.AnalyzeEntities", - index=1, - containing_service=None, - input_type=_ANALYZEENTITIESREQUEST, - output_type=_ANALYZEENTITIESRESPONSE, - serialized_options=b'\202\323\344\223\002\'""/v1beta2/documents:analyzeEntities:\001*\332A\026document,encoding_type\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AnalyzeEntitySentiment", - full_name="google.cloud.language.v1beta2.LanguageService.AnalyzeEntitySentiment", - index=2, - containing_service=None, - input_type=_ANALYZEENTITYSENTIMENTREQUEST, - output_type=_ANALYZEENTITYSENTIMENTRESPONSE, - serialized_options=b'\202\323\344\223\002.")/v1beta2/documents:analyzeEntitySentiment:\001*\332A\026document,encoding_type\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AnalyzeSyntax", - full_name="google.cloud.language.v1beta2.LanguageService.AnalyzeSyntax", - index=3, - containing_service=None, - input_type=_ANALYZESYNTAXREQUEST, - output_type=_ANALYZESYNTAXRESPONSE, - serialized_options=b'\202\323\344\223\002%" /v1beta2/documents:analyzeSyntax:\001*\332A\026document,encoding_type\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ClassifyText", - full_name="google.cloud.language.v1beta2.LanguageService.ClassifyText", - index=4, - containing_service=None, - input_type=_CLASSIFYTEXTREQUEST, - output_type=_CLASSIFYTEXTRESPONSE, - serialized_options=b'\202\323\344\223\002$"\037/v1beta2/documents:classifyText:\001*\332A\010document', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="AnnotateText", - full_name="google.cloud.language.v1beta2.LanguageService.AnnotateText", - index=5, - containing_service=None, - input_type=_ANNOTATETEXTREQUEST, - 
output_type=_ANNOTATETEXTRESPONSE, - serialized_options=b'\202\323\344\223\002$"\037/v1beta2/documents:annotateText:\001*\332A\037document,features,encoding_type\332A\021document,features', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_LANGUAGESERVICE) - -DESCRIPTOR.services_by_name["LanguageService"] = _LANGUAGESERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py b/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py deleted file mode 100644 index 4db8cf82..00000000 --- a/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py +++ /dev/null @@ -1,142 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.language_v1beta2.proto import ( - language_service_pb2 as google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2, -) - - -class LanguageServiceStub(object): - """Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.AnalyzeSentiment = channel.unary_unary( - "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment", - request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, - ) - self.AnalyzeEntities = channel.unary_unary( - "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities", - request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, - ) - self.AnalyzeEntitySentiment = channel.unary_unary( - "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment", - request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentResponse.FromString, - ) - self.AnalyzeSyntax = channel.unary_unary( - "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax", - request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, - ) - self.ClassifyText = channel.unary_unary( - "/google.cloud.language.v1beta2.LanguageService/ClassifyText", - request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.ClassifyTextRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.ClassifyTextResponse.FromString, - ) - self.AnnotateText = channel.unary_unary( - "/google.cloud.language.v1beta2.LanguageService/AnnotateText", - request_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnnotateTextResponse.FromString, - ) - - 
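For reference, the hand-written gRPC stub deleted above was driven directly: construct a channel, instantiate LanguageServiceStub, and issue unary-unary calls using the request messages from language_service_pb2. The sketch below shows that pre-migration usage; it assumes the old proto import paths that this diff removes, and authentication is omitted for brevity, so a real call would also need credentials attached (for example via google.auth call credentials).

# Illustrative sketch only: how the removed LanguageServiceStub was typically used.
# Assumes the pre-migration modules deleted in this diff; auth is omitted.
import grpc
from google.cloud.language_v1beta2.proto import language_service_pb2
from google.cloud.language_v1beta2.proto import language_service_pb2_grpc

# Open a TLS channel to the API endpoint and wrap it in the generated stub.
channel = grpc.secure_channel(
    "language.googleapis.com:443", grpc.ssl_channel_credentials()
)
stub = language_service_pb2_grpc.LanguageServiceStub(channel)

# Build a request message from the generated pb2 types.
request = language_service_pb2.AnalyzeSentimentRequest(
    document=language_service_pb2.Document(
        type=language_service_pb2.Document.PLAIN_TEXT,
        content="Example text for sentiment analysis.",
    )
)

# Each stub attribute is a unary-unary callable created in the stub's __init__.
response = stub.AnalyzeSentiment(request)
print(response.document_sentiment.score)

The microgenerated client under google/cloud/language_v1beta2/services/language_service (modified later in this diff) wraps the same RPCs behind LanguageServiceClient and LanguageServiceAsyncClient, which is why these hand-rolled pb2/pb2_grpc modules can be deleted.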
-class LanguageServiceServicer(object): - """Provides text analysis operations such as sentiment analysis and entity - recognition. - """ - - def AnalyzeSentiment(self, request, context): - """Analyzes the sentiment of the provided text. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AnalyzeEntities(self, request, context): - """Finds named entities (currently proper names and common nouns) in the text - along with entity types, salience, mentions for each entity, and - other properties. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AnalyzeEntitySentiment(self, request, context): - """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes - sentiment associated with each entity and its mentions. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AnalyzeSyntax(self, request, context): - """Analyzes the syntax of the text and provides sentence boundaries and - tokenization along with part-of-speech tags, dependency trees, and other - properties. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ClassifyText(self, request, context): - """Classifies a document into categories. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def AnnotateText(self, request, context): - """A convenience method that provides all syntax, sentiment, entity, and - classification features in one call. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_LanguageServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "AnalyzeSentiment": grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeSentiment, - request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, - ), - "AnalyzeEntities": grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeEntities, - request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, - ), - "AnalyzeEntitySentiment": grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeEntitySentiment, - request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeEntitySentimentResponse.SerializeToString, - ), - "AnalyzeSyntax": grpc.unary_unary_rpc_method_handler( - servicer.AnalyzeSyntax, - request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, - ), - "ClassifyText": grpc.unary_unary_rpc_method_handler( - servicer.ClassifyText, - request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.ClassifyTextRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.ClassifyTextResponse.SerializeToString, - ), - "AnnotateText": grpc.unary_unary_rpc_method_handler( - servicer.AnnotateText, - request_deserializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnnotateTextRequest.FromString, - response_serializer=google_dot_cloud_dot_language__v1beta2_dot_proto_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.language.v1beta2.LanguageService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/google/cloud/language_v1beta2/services/language_service/async_client.py b/google/cloud/language_v1beta2/services/language_service/async_client.py index a1ab4d7d..bbb4be81 100644 --- a/google/cloud/language_v1beta2/services/language_service/async_client.py +++ b/google/cloud/language_v1beta2/services/language_service/async_client.py @@ -74,8 +74,36 @@ class LanguageServiceAsyncClient: LanguageServiceClient.parse_common_location_path ) - from_service_account_info = LanguageServiceClient.from_service_account_info - from_service_account_file = LanguageServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_info.__func__(LanguageServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LanguageServiceAsyncClient: The constructed client. + """ + return LanguageServiceClient.from_service_account_file.__func__(LanguageServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -212,6 +240,7 @@ async def analyze_sentiment( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -295,6 +324,7 @@ async def analyze_entities( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -381,6 +411,7 @@ async def analyze_entity_sentiment( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -463,6 +494,7 @@ async def analyze_syntax( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -536,6 +568,7 @@ async def classify_text( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, @@ -630,6 +663,7 @@ async def annotate_text( predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/language_v1beta2/services/language_service/client.py b/google/cloud/language_v1beta2/services/language_service/client.py index 40c75c69..9eba35d5 100644 --- a/google/cloud/language_v1beta2/services/language_service/client.py +++ b/google/cloud/language_v1beta2/services/language_service/client.py @@ -270,21 +270,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -327,7 +323,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/google/cloud/language_v1beta2/services/language_service/transports/base.py b/google/cloud/language_v1beta2/services/language_service/transports/base.py index 4e4f7add..65a1685c 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/base.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/base.py @@ -70,10 +70,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -81,6 +81,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -90,20 +93,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -116,6 +116,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -129,6 +130,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -142,6 +144,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -155,6 +158,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -168,6 +172,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, @@ -181,6 +186,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=600.0, ), default_timeout=600.0, client_info=client_info, diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py index 4a698c25..22f74961 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -88,6 +89,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -102,72 +107,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
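The repeated `deadline=600.0` additions above give each `google.api_core.retry.Retry` an overall retry budget on top of the existing 600-second method timeout. A standalone sketch of the resulting retry shape; the backoff numbers are illustrative, only the predicate and deadline mirror the diff:

from google.api_core import exceptions
from google.api_core import retry as retries

retry = retries.Retry(
    initial=0.1,  # illustrative backoff values
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        exceptions.DeadlineExceeded,
        exceptions.ServiceUnavailable,
    ),
    deadline=600.0,  # stop retrying once 600 seconds have elapsed overall
)

# The per-method default_timeout of 600.0 seconds is unchanged; callers can
# still override both, e.g.
# client.analyze_sentiment(request=request, retry=retry, timeout=600.0).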
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -175,17 +168,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -199,7 +183,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py index 0242e2a3..6bccccd9 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py @@ -62,7 +62,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -102,6 +102,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -133,12 +134,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -147,72 +152,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -220,17 +213,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/language_v1beta2/types.py b/google/cloud/language_v1beta2/types.py deleted file mode 100644 index 1a33a23e..00000000 --- a/google/cloud/language_v1beta2/types.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
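The file deleted above was the pre-microgenerator shim that re-exported raw protobuf messages via `get_messages`; message classes now come from the proto-plus based `types` package, whose exports are re-sorted in the next hunk. A small usage sketch, assuming the usual proto-plus naming where the reserved word `type` becomes `type_`:

from google.cloud.language_v1beta2 import types

document = types.Document(
    content="The quick brown fox jumps over the lazy dog.",
    type_=types.Document.Type.PLAIN_TEXT,  # `type` is reserved, hence `type_`
)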
- -from __future__ import absolute_import -import sys - -from google.api import http_pb2 -from google.longrunning import operations_pb2 -from google.protobuf import any_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 - -from google.api_core.protobuf_helpers import get_messages -from google.cloud.language_v1beta2.proto import language_service_pb2 - - -_shared_modules = [ - http_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - empty_pb2, - timestamp_pb2, - status_pb2, -] - -_local_modules = [language_service_pb2] - -names = [] - -for module in _shared_modules: - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) - -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.language_v1beta2.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - -__all__ = tuple(sorted(names)) diff --git a/google/cloud/language_v1beta2/types/__init__.py b/google/cloud/language_v1beta2/types/__init__.py index 4598667d..025cbb98 100644 --- a/google/cloud/language_v1beta2/types/__init__.py +++ b/google/cloud/language_v1beta2/types/__init__.py @@ -16,53 +16,53 @@ # from .language_service import ( - Document, - Sentence, - Entity, - Token, - Sentiment, - PartOfSpeech, - DependencyEdge, - EntityMention, - TextSpan, - ClassificationCategory, - AnalyzeSentimentRequest, - AnalyzeSentimentResponse, - AnalyzeEntitySentimentRequest, - AnalyzeEntitySentimentResponse, AnalyzeEntitiesRequest, AnalyzeEntitiesResponse, + AnalyzeEntitySentimentRequest, + AnalyzeEntitySentimentResponse, + AnalyzeSentimentRequest, + AnalyzeSentimentResponse, AnalyzeSyntaxRequest, AnalyzeSyntaxResponse, - ClassifyTextRequest, - ClassifyTextResponse, AnnotateTextRequest, AnnotateTextResponse, + ClassificationCategory, + ClassifyTextRequest, + ClassifyTextResponse, + DependencyEdge, + Document, + Entity, + EntityMention, + PartOfSpeech, + Sentence, + Sentiment, + TextSpan, + Token, EncodingType, ) __all__ = ( - "Document", - "Sentence", - "Entity", - "Token", - "Sentiment", - "PartOfSpeech", - "DependencyEdge", - "EntityMention", - "TextSpan", - "ClassificationCategory", - "AnalyzeSentimentRequest", - "AnalyzeSentimentResponse", - "AnalyzeEntitySentimentRequest", - "AnalyzeEntitySentimentResponse", "AnalyzeEntitiesRequest", "AnalyzeEntitiesResponse", + "AnalyzeEntitySentimentRequest", + "AnalyzeEntitySentimentResponse", + "AnalyzeSentimentRequest", + "AnalyzeSentimentResponse", "AnalyzeSyntaxRequest", "AnalyzeSyntaxResponse", - "ClassifyTextRequest", - "ClassifyTextResponse", "AnnotateTextRequest", "AnnotateTextResponse", + "ClassificationCategory", + "ClassifyTextRequest", + "ClassifyTextResponse", + "DependencyEdge", + "Document", + "Entity", + "EntityMention", + "PartOfSpeech", + "Sentence", + "Sentiment", + "TextSpan", + "Token", "EncodingType", ) diff --git a/noxfile.py b/noxfile.py index 9427793d..4d37cd3a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,22 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + 
"lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -70,20 +87,23 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("-e", ".") + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) + + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", - "--cov=google.cloud.language", - "--cov=google.cloud", - "--cov=tests.unit", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", @@ -102,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -111,6 +134,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -123,16 +149,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -143,7 +179,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") @@ -175,9 +211,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. 
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/renovate.json b/renovate.json index 4fa94931..f08bc22c 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/samples/snippets/api/noxfile.py b/samples/snippets/api/noxfile.py index b90eef00..97bf7da8 100644 --- a/samples/snippets/api/noxfile.py +++ b/samples/snippets/api/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -68,7 +69,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -84,7 +85,7 @@ def get_pytest_env_vars(): # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] @@ -97,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: @@ -154,7 +155,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,7 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +195,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -232,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/classify_text/noxfile.py b/samples/snippets/classify_text/noxfile.py index b90eef00..97bf7da8 100644 --- a/samples/snippets/classify_text/noxfile.py +++ b/samples/snippets/classify_text/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -68,7 +69,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -84,7 +85,7 @@ def get_pytest_env_vars(): # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] @@ -97,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: @@ -154,7 +155,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,7 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +195,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -232,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/cloud-client/v1/noxfile.py b/samples/snippets/cloud-client/v1/noxfile.py index b90eef00..97bf7da8 100644 --- a/samples/snippets/cloud-client/v1/noxfile.py +++ b/samples/snippets/cloud-client/v1/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -68,7 +69,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -84,7 +85,7 @@ def get_pytest_env_vars(): # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] @@ -97,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: @@ -154,7 +155,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,7 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +195,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -232,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/generated-samples/v1/noxfile.py b/samples/snippets/generated-samples/v1/noxfile.py index b90eef00..97bf7da8 100644 --- a/samples/snippets/generated-samples/v1/noxfile.py +++ b/samples/snippets/generated-samples/v1/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -68,7 +69,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -84,7 +85,7 @@ def get_pytest_env_vars(): # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] @@ -97,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: @@ -154,7 +155,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,7 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +195,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -232,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/sentiment/noxfile.py b/samples/snippets/sentiment/noxfile.py index b90eef00..97bf7da8 100644 --- a/samples/snippets/sentiment/noxfile.py +++ b/samples/snippets/sentiment/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -68,7 +69,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -84,7 +85,7 @@ def get_pytest_env_vars(): # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] @@ -97,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: @@ -154,7 +155,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,7 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +195,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -232,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/setup.py b/setup.py index b0bac6b2..a6ee9706 100644 --- a/setup.py +++ b/setup.py @@ -31,9 +31,8 @@ dependencies = [ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.10.0", - "libcst >= 0.2.5", ] -extras = {} +extras = {"libcst": "libcst >= 0.2.5"} # Setup boilerplate below this line. 
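The setup.py hunk above demotes `libcst` from a required dependency to an opt-in extra, so tooling that needs it (for example the `scripts/fixup_*_keywords.py` helpers) can pull it in with `pip install google-cloud-language[libcst]`. A minimal packaging sketch of that shape; only the dependency strings are taken from the diff, the remaining metadata is a placeholder:

import setuptools

setuptools.setup(
    name="google-cloud-language",  # placeholder for the real setup.py metadata
    install_requires=[
        "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
        "proto-plus >= 1.10.0",
    ],
    extras_require={
        # Only installed when requested: pip install google-cloud-language[libcst]
        "libcst": ["libcst >= 0.2.5"],
    },
)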
diff --git a/synth.metadata b/synth.metadata index 98b94222..6ed319cd 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-language.git", - "sha": "3476c0f72529cbcbe61ea5c7e6a22291777bed7e" + "remote": "git@github.com:googleapis/python-language.git", + "sha": "6139396d5d42339bf67363faee230ada85d65b48" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "520682435235d9c503983a360a2090025aa47cd1", - "internalRef": "350246057" + "sha": "915925089600094e72e4bfa8cf586c170e6b7109", + "internalRef": "366152684" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" + "sha": "6d76df2138f8f841e5a5b9ac427f81def520c15f" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984" + "sha": "6d76df2138f8f841e5a5b9ac427f81def520c15f" } } ], @@ -49,119 +49,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/language_v1/language_service.rst", - "docs/language_v1/services.rst", - "docs/language_v1/types.rst", - "docs/language_v1beta2/language_service.rst", - "docs/language_v1beta2/services.rst", - "docs/language_v1beta2/types.rst", - "docs/multiprocessing.rst", - "google/cloud/language/__init__.py", - "google/cloud/language/py.typed", - "google/cloud/language_v1/__init__.py", - "google/cloud/language_v1/proto/language_service.proto", - "google/cloud/language_v1/py.typed", - "google/cloud/language_v1/services/__init__.py", - "google/cloud/language_v1/services/language_service/__init__.py", - "google/cloud/language_v1/services/language_service/async_client.py", - 
"google/cloud/language_v1/services/language_service/client.py", - "google/cloud/language_v1/services/language_service/transports/__init__.py", - "google/cloud/language_v1/services/language_service/transports/base.py", - "google/cloud/language_v1/services/language_service/transports/grpc.py", - "google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py", - "google/cloud/language_v1/types/__init__.py", - "google/cloud/language_v1/types/language_service.py", - "google/cloud/language_v1beta2/__init__.py", - "google/cloud/language_v1beta2/proto/language_service.proto", - "google/cloud/language_v1beta2/py.typed", - "google/cloud/language_v1beta2/services/__init__.py", - "google/cloud/language_v1beta2/services/language_service/__init__.py", - "google/cloud/language_v1beta2/services/language_service/async_client.py", - "google/cloud/language_v1beta2/services/language_service/client.py", - "google/cloud/language_v1beta2/services/language_service/transports/__init__.py", - "google/cloud/language_v1beta2/services/language_service/transports/base.py", - "google/cloud/language_v1beta2/services/language_service/transports/grpc.py", - "google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py", - "google/cloud/language_v1beta2/types/__init__.py", - "google/cloud/language_v1beta2/types/language_service.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/api/noxfile.py", - "samples/snippets/classify_text/noxfile.py", - "samples/snippets/cloud-client/v1/noxfile.py", - "samples/snippets/generated-samples/v1/noxfile.py", - "samples/snippets/sentiment/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_language_v1_keywords.py", - "scripts/fixup_language_v1beta2_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/language_v1/__init__.py", - "tests/unit/gapic/language_v1/test_language_service.py", - "tests/unit/gapic/language_v1beta2/__init__.py", - "tests/unit/gapic/language_v1beta2/test_language_service.py" ] } \ No newline at end of file diff --git a/synth.py b/synth.py index d1aec55f..c770dcfb 100644 --- a/synth.py +++ b/synth.py @@ -33,12 +33,12 @@ bazel_target=f"//google/cloud/language/{version}:language-{version}-py", include_protos=True, ) - s.move(library, excludes=["docs/index.rst", "README.rst", "setup.py"]) + s.move(library, excludes=["docs/index.rst", "README.rst", "setup.py"]) # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=99, samples=True, microgenerator=True,) +templated_files = common.py_library(cov_level=98, samples=True, microgenerator=True,) s.move(templated_files, excludes=['.coveragerc']) @@ -50,4 +50,4 @@ python.py_samples(skip_readmes=True) -s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 00000000..8f70f412 --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 
+1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. + +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# then this file should have google-cloud-foo==1.14.0 +google-api-core==1.22.2 +proto-plus==1.10.0 +libcst==0.2.5 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 00000000..da93009b --- /dev/null +++ b/testing/constraints-3.7.txt @@ -0,0 +1,2 @@ +# This constraints file is left intentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 00000000..da93009b --- /dev/null +++ b/testing/constraints-3.8.txt @@ -0,0 +1,2 @@ +# This constraints file is left intentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 00000000..da93009b --- /dev/null +++ b/testing/constraints-3.9.txt @@ -0,0 +1,2 @@ +# This constraints file is left intentionally empty +# so the latest version of dependencies is installed \ No newline at end of file diff --git a/tests/unit/gapic/language_v1/__init__.py b/tests/unit/gapic/language_v1/__init__.py index 8b137891..42ffdf2b 100644 --- a/tests/unit/gapic/language_v1/__init__.py +++ b/tests/unit/gapic/language_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
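The new testing/constraints-*.txt files above pair with the `-c` flags the reworked noxfile now passes to `session.install`: Python 3.6 is pinned to the declared lower bounds while 3.7 and later float to the latest releases. A stripped-down nox session sketch of that pattern (the dependency names are illustrative):

import pathlib

import nox

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()


@nox.session(python=["3.6", "3.7", "3.8", "3.9"])
def unit(session):
    # constraints-3.6.txt pins lower bounds; the 3.7/3.8/3.9 files are empty
    # on purpose so those interpreters test against the newest releases.
    constraints_path = str(
        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
    )
    session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
    session.install("-e", ".", "-c", constraints_path)
    session.run(
        "py.test",
        "--quiet",
        f"--junitxml=unit_{session.python}_sponge_log.xml",
        "tests/unit",
    )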
+# diff --git a/tests/unit/gapic/language_v1/test_language_service.py b/tests/unit/gapic/language_v1/test_language_service.py index d2c1fbff..dbcd0244 100644 --- a/tests/unit/gapic/language_v1/test_language_service.py +++ b/tests/unit/gapic/language_v1/test_language_service.py @@ -85,15 +85,19 @@ def test__get_default_mtls_endpoint(): ) -def test_language_service_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [LanguageServiceClient, LanguageServiceAsyncClient,] +) +def test_language_service_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = LanguageServiceClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "language.googleapis.com:443" @@ -109,9 +113,11 @@ def test_language_service_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "language.googleapis.com:443" @@ -172,7 +178,7 @@ def test_language_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -188,7 +194,7 @@ def test_language_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -204,7 +210,7 @@ def test_language_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -232,7 +238,7 @@ def test_language_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -293,29 +299,25 @@ def test_language_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = 
client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -324,66 +326,53 @@ def test_language_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -409,7 +398,7 @@ def test_language_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -439,7 +428,7 @@ def test_language_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -458,7 +447,7 @@ def test_language_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -503,6 +492,24 @@ def test_analyze_sentiment_from_dict(): test_analyze_sentiment(request_type=dict) +def test_analyze_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), "__call__" + ) as call: + client.analyze_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeSentimentRequest() + + @pytest.mark.asyncio async def test_analyze_sentiment_async( transport: str = "grpc_asyncio", @@ -678,6 +685,22 @@ def test_analyze_entities_from_dict(): test_analyze_entities(request_type=dict) +def test_analyze_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.analyze_entities), "__call__") as call: + client.analyze_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeEntitiesRequest() + + @pytest.mark.asyncio async def test_analyze_entities_async( transport: str = "grpc_asyncio", @@ -849,6 +872,24 @@ def test_analyze_entity_sentiment_from_dict(): test_analyze_entity_sentiment(request_type=dict) +def test_analyze_entity_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), "__call__" + ) as call: + client.analyze_entity_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + + @pytest.mark.asyncio async def test_analyze_entity_sentiment_async( transport: str = "grpc_asyncio", @@ -1024,6 +1065,22 @@ def test_analyze_syntax_from_dict(): test_analyze_syntax(request_type=dict) +def test_analyze_syntax_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.analyze_syntax), "__call__") as call: + client.analyze_syntax() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeSyntaxRequest() + + @pytest.mark.asyncio async def test_analyze_syntax_async( transport: str = "grpc_asyncio", request_type=language_service.AnalyzeSyntaxRequest @@ -1188,6 +1245,22 @@ def test_classify_text_from_dict(): test_classify_text(request_type=dict) +def test_classify_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.classify_text), "__call__") as call: + client.classify_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.ClassifyTextRequest() + + @pytest.mark.asyncio async def test_classify_text_async( transport: str = "grpc_asyncio", request_type=language_service.ClassifyTextRequest @@ -1346,6 +1419,22 @@ def test_annotate_text_from_dict(): test_annotate_text(request_type=dict) +def test_annotate_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_text), "__call__") as call: + client.annotate_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnnotateTextRequest() + + @pytest.mark.asyncio async def test_annotate_text_async( transport: str = "grpc_asyncio", request_type=language_service.AnnotateTextRequest @@ -1662,6 +1751,54 @@ def test_language_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + ], +) +def test_language_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_language_service_host_no_port(): client = LanguageServiceClient( credentials=credentials.AnonymousCredentials(), @@ -1706,6 +1843,8 @@ def test_language_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1761,6 +1900,8 @@ def test_language_service_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ diff --git a/tests/unit/gapic/language_v1beta2/__init__.py b/tests/unit/gapic/language_v1beta2/__init__.py index 8b137891..42ffdf2b 100644 --- a/tests/unit/gapic/language_v1beta2/__init__.py +++ b/tests/unit/gapic/language_v1beta2/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
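In the new test_language_service_grpc_transport_client_cert_source_for_mtls above, the first branch keeps backward compatibility: ready-made ssl_channel_credentials still take precedence over any callback. A minimal sketch of that direct-construction path, with grpc.local_channel_credentials standing in for real mTLS credentials so the snippet stays self-contained (an illustration, not the library's recommended configuration):

import grpc
from google.auth import credentials as ga_credentials
from google.cloud.language_v1.services.language_service import transports

# Channel creation is lazy, so constructing the transport does not open a connection.
transport = transports.LanguageServiceGrpcTransport(
    host="language.googleapis.com",
    credentials=ga_credentials.AnonymousCredentials(),
    ssl_channel_credentials=grpc.local_channel_credentials(),
)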
+# diff --git a/tests/unit/gapic/language_v1beta2/test_language_service.py b/tests/unit/gapic/language_v1beta2/test_language_service.py index c25ca765..ab2cc3d6 100644 --- a/tests/unit/gapic/language_v1beta2/test_language_service.py +++ b/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -87,15 +87,19 @@ def test__get_default_mtls_endpoint(): ) -def test_language_service_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [LanguageServiceClient, LanguageServiceAsyncClient,] +) +def test_language_service_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = LanguageServiceClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "language.googleapis.com:443" @@ -111,9 +115,11 @@ def test_language_service_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "language.googleapis.com:443" @@ -174,7 +180,7 @@ def test_language_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -190,7 +196,7 @@ def test_language_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -206,7 +212,7 @@ def test_language_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -234,7 +240,7 @@ def test_language_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -295,29 +301,25 @@ def test_language_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + 
expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -326,66 +328,53 @@ def test_language_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
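Taken together, the parametrized cases above and below fix the selection behavior for the v1beta2 client as well: an explicitly supplied callback wins, otherwise the ADC default client cert source is used, and neither applies unless GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". The helper below is a simplified paraphrase of that behavior, written for illustration only (an assumption, not the generated client's literal code):

import os
from google.auth.transport import mtls

def resolve_client_cert_source(provided_cert_source=None):
    # Honor a cert source only when explicitly enabled via the environment.
    if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") != "true":
        return None
    # Prefer a callback supplied through client options over the ADC default one.
    if provided_cert_source is not None:
        return provided_cert_source
    if mtls.has_default_client_cert_source():
        return mtls.default_client_cert_source()
    return None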
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -411,7 +400,7 @@ def test_language_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -441,7 +430,7 @@ def test_language_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -460,7 +449,7 @@ def test_language_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -505,6 +494,24 @@ def test_analyze_sentiment_from_dict(): test_analyze_sentiment(request_type=dict) +def test_analyze_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_sentiment), "__call__" + ) as call: + client.analyze_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeSentimentRequest() + + @pytest.mark.asyncio async def test_analyze_sentiment_async( transport: str = "grpc_asyncio", @@ -680,6 +687,22 @@ def test_analyze_entities_from_dict(): test_analyze_entities(request_type=dict) +def test_analyze_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.analyze_entities), "__call__") as call: + client.analyze_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeEntitiesRequest() + + @pytest.mark.asyncio async def test_analyze_entities_async( transport: str = "grpc_asyncio", @@ -851,6 +874,24 @@ def test_analyze_entity_sentiment_from_dict(): test_analyze_entity_sentiment(request_type=dict) +def test_analyze_entity_sentiment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_entity_sentiment), "__call__" + ) as call: + client.analyze_entity_sentiment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeEntitySentimentRequest() + + @pytest.mark.asyncio async def test_analyze_entity_sentiment_async( transport: str = "grpc_asyncio", @@ -1026,6 +1067,22 @@ def test_analyze_syntax_from_dict(): test_analyze_syntax(request_type=dict) +def test_analyze_syntax_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.analyze_syntax), "__call__") as call: + client.analyze_syntax() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnalyzeSyntaxRequest() + + @pytest.mark.asyncio async def test_analyze_syntax_async( transport: str = "grpc_asyncio", request_type=language_service.AnalyzeSyntaxRequest @@ -1190,6 +1247,22 @@ def test_classify_text_from_dict(): test_classify_text(request_type=dict) +def test_classify_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.classify_text), "__call__") as call: + client.classify_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.ClassifyTextRequest() + + @pytest.mark.asyncio async def test_classify_text_async( transport: str = "grpc_asyncio", request_type=language_service.ClassifyTextRequest @@ -1348,6 +1421,22 @@ def test_annotate_text_from_dict(): test_annotate_text(request_type=dict) +def test_annotate_text_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LanguageServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_text), "__call__") as call: + client.annotate_text() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == language_service.AnnotateTextRequest() + + @pytest.mark.asyncio async def test_annotate_text_async( transport: str = "grpc_asyncio", request_type=language_service.AnnotateTextRequest @@ -1664,6 +1753,54 @@ def test_language_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + ], +) +def test_language_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
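As with the v1 tests, this v1beta2 test exercises both configuration paths. For reference, the client_cert_source_callback fixture used throughout these tests is simply a zero-argument callable returning a (certificate PEM bytes, private key PEM bytes) pair; a file-backed callback of the same shape might look like the sketch below (the file names are hypothetical placeholders):

def client_cert_source_from_files():
    # Returns (certificate chain PEM, private key PEM) as bytes, the tuple that
    # grpc.ssl_channel_credentials(certificate_chain=..., private_key=...) expects.
    with open("client_cert.pem", "rb") as cert_file, open("client_key.pem", "rb") as key_file:
        return cert_file.read(), key_file.read()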
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_language_service_host_no_port(): client = LanguageServiceClient( credentials=credentials.AnonymousCredentials(), @@ -1708,6 +1845,8 @@ def test_language_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1763,6 +1902,8 @@ def test_language_service_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [