From b9aa813819f4c19844d96f4e3e126bf9c18d8760 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Sat, 30 Oct 2021 01:56:47 +0000 Subject: [PATCH 1/4] chore: use gapic-generator-python 0.53.4 docs: list oneofs in docstring committer: busunkim96@ PiperOrigin-RevId: 406468269 Source-Link: https://github.com/googleapis/googleapis/commit/83d81b0c8fc22291a13398d6d77f02dc97a5b6f4 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2ff001fbacb9e77e71d734de5f955c05fdae8526 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmZmMDAxZmJhY2I5ZTc3ZTcxZDczNGRlNWY5NTVjMDVmZGFlODUyNiJ9 --- owl-bot-staging/spanner/v1/.coveragerc | 17 + owl-bot-staging/spanner/v1/MANIFEST.in | 2 + owl-bot-staging/spanner/v1/README.rst | 49 + owl-bot-staging/spanner/v1/docs/conf.py | 376 ++ owl-bot-staging/spanner/v1/docs/index.rst | 7 + .../spanner/v1/docs/spanner_v1/services.rst | 6 + .../spanner/v1/docs/spanner_v1/spanner.rst | 10 + .../spanner/v1/docs/spanner_v1/types.rst | 7 + .../v1/google/cloud/spanner/__init__.py | 97 + .../spanner/v1/google/cloud/spanner/py.typed | 2 + .../v1/google/cloud/spanner_v1/__init__.py | 98 + .../cloud/spanner_v1/gapic_metadata.json | 173 + .../v1/google/cloud/spanner_v1/py.typed | 2 + .../cloud/spanner_v1/services/__init__.py | 15 + .../spanner_v1/services/spanner/__init__.py | 22 + .../services/spanner/async_client.py | 1486 +++++ .../spanner_v1/services/spanner/client.py | 1621 +++++ .../spanner_v1/services/spanner/pagers.py | 140 + .../services/spanner/transports/__init__.py | 33 + .../services/spanner/transports/base.py | 425 ++ .../services/spanner/transports/grpc.py | 756 +++ .../spanner/transports/grpc_asyncio.py | 760 +++ .../google/cloud/spanner_v1/types/__init__.py | 108 + .../cloud/spanner_v1/types/commit_response.py | 75 + .../v1/google/cloud/spanner_v1/types/keys.py | 240 + .../google/cloud/spanner_v1/types/mutation.py | 192 + .../cloud/spanner_v1/types/query_plan.py | 200 + .../cloud/spanner_v1/types/result_set.py | 316 + 
.../google/cloud/spanner_v1/types/spanner.py | 1263 ++++ .../cloud/spanner_v1/types/transaction.py | 596 ++ .../v1/google/cloud/spanner_v1/types/type.py | 137 + owl-bot-staging/spanner/v1/mypy.ini | 3 + owl-bot-staging/spanner/v1/noxfile.py | 132 + .../v1/scripts/fixup_spanner_v1_keywords.py | 190 + owl-bot-staging/spanner/v1/setup.py | 54 + owl-bot-staging/spanner/v1/tests/__init__.py | 16 + .../spanner/v1/tests/unit/__init__.py | 16 + .../spanner/v1/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/spanner_v1/__init__.py | 16 + .../unit/gapic/spanner_v1/test_spanner.py | 4046 ++++++++++++ .../spanner_admin_database/v1/.coveragerc | 17 + .../spanner_admin_database/v1/MANIFEST.in | 2 + .../spanner_admin_database/v1/README.rst | 49 + .../spanner_admin_database/v1/docs/conf.py | 376 ++ .../spanner_admin_database/v1/docs/index.rst | 7 + .../database_admin.rst | 10 + .../spanner_admin_database_v1/services.rst | 6 + .../docs/spanner_admin_database_v1/types.rst | 7 + .../cloud/spanner_admin_database/__init__.py | 91 + .../cloud/spanner_admin_database/py.typed | 2 + .../spanner_admin_database_v1/__init__.py | 92 + .../gapic_metadata.json | 193 + .../cloud/spanner_admin_database_v1/py.typed | 2 + .../services/__init__.py | 15 + .../services/database_admin/__init__.py | 22 + .../services/database_admin/async_client.py | 1968 ++++++ .../services/database_admin/client.py | 2116 ++++++ .../services/database_admin/pagers.py | 508 ++ .../database_admin/transports/__init__.py | 33 + .../database_admin/transports/base.py | 467 ++ .../database_admin/transports/grpc.py | 802 +++ .../database_admin/transports/grpc_asyncio.py | 806 +++ .../types/__init__.py | 92 + .../spanner_admin_database_v1/types/backup.py | 638 ++ .../spanner_admin_database_v1/types/common.py | 117 + .../types/spanner_database_admin.py | 826 +++ .../spanner_admin_database/v1/mypy.ini | 3 + .../spanner_admin_database/v1/noxfile.py | 132 + ...ixup_spanner_admin_database_v1_keywords.py | 192 + 
.../spanner_admin_database/v1/setup.py | 55 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../spanner_admin_database_v1/__init__.py | 16 + .../test_database_admin.py | 5880 +++++++++++++++++ .../spanner_admin_instance/v1/.coveragerc | 17 + .../spanner_admin_instance/v1/MANIFEST.in | 2 + .../spanner_admin_instance/v1/README.rst | 49 + .../spanner_admin_instance/v1/docs/conf.py | 376 ++ .../spanner_admin_instance/v1/docs/index.rst | 7 + .../instance_admin.rst | 10 + .../spanner_admin_instance_v1/services.rst | 6 + .../docs/spanner_admin_instance_v1/types.rst | 7 + .../cloud/spanner_admin_instance/__init__.py | 51 + .../cloud/spanner_admin_instance/py.typed | 2 + .../spanner_admin_instance_v1/__init__.py | 52 + .../gapic_metadata.json | 123 + .../cloud/spanner_admin_instance_v1/py.typed | 2 + .../services/__init__.py | 15 + .../services/instance_admin/__init__.py | 22 + .../services/instance_admin/async_client.py | 1301 ++++ .../services/instance_admin/client.py | 1464 ++++ .../services/instance_admin/pagers.py | 262 + .../instance_admin/transports/__init__.py | 33 + .../instance_admin/transports/base.py | 325 + .../instance_admin/transports/grpc.py | 638 ++ .../instance_admin/transports/grpc_asyncio.py | 642 ++ .../types/__init__.py | 48 + .../types/spanner_instance_admin.py | 605 ++ .../spanner_admin_instance/v1/mypy.ini | 3 + .../spanner_admin_instance/v1/noxfile.py | 132 + ...ixup_spanner_admin_instance_v1_keywords.py | 185 + .../spanner_admin_instance/v1/setup.py | 55 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../spanner_admin_instance_v1/__init__.py | 16 + .../test_instance_admin.py | 3779 +++++++++++ 108 files changed, 39587 insertions(+) create mode 100644 owl-bot-staging/spanner/v1/.coveragerc create mode 100644 owl-bot-staging/spanner/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner/v1/README.rst create mode 
100644 owl-bot-staging/spanner/v1/docs/conf.py create mode 100644 owl-bot-staging/spanner/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/services.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/types.rst create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py create mode 100644 
owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py create mode 100644 owl-bot-staging/spanner/v1/mypy.ini create mode 100644 owl-bot-staging/spanner/v1/noxfile.py create mode 100644 owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py create mode 100644 owl-bot-staging/spanner/v1/setup.py create mode 100644 owl-bot-staging/spanner/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/.coveragerc create mode 100644 owl-bot-staging/spanner_admin_database/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner_admin_database/v1/README.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/conf.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py create mode 100644 
owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py create mode 100644 
owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/mypy.ini create mode 100644 owl-bot-staging/spanner_admin_database/v1/noxfile.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/setup.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/.coveragerc create mode 100644 owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner_admin_instance/v1/README.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/conf.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/mypy.ini create mode 100644 owl-bot-staging/spanner_admin_instance/v1/noxfile.py create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/setup.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py diff --git a/owl-bot-staging/spanner/v1/.coveragerc b/owl-bot-staging/spanner/v1/.coveragerc new file mode 100644 index 0000000000..98857dd0c9 --- /dev/null +++ b/owl-bot-staging/spanner/v1/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/spanner/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/spanner/v1/MANIFEST.in b/owl-bot-staging/spanner/v1/MANIFEST.in new file mode 100644 index 0000000000..2bde59af4d --- /dev/null +++ b/owl-bot-staging/spanner/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/spanner *.py +recursive-include google/cloud/spanner_v1 *.py diff --git a/owl-bot-staging/spanner/v1/README.rst b/owl-bot-staging/spanner/v1/README.rst new file mode 100644 index 0000000000..d6abf98111 --- /dev/null +++ b/owl-bot-staging/spanner/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Spanner API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Spanner API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/spanner/v1/docs/conf.py b/owl-bot-staging/spanner/v1/docs/conf.py new file mode 100644 index 0000000000..c87b4a6a95 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-spanner documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-spanner" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-spanner-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-spanner.tex", + u"google-cloud-spanner Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-spanner", + u"Google Cloud Spanner Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-spanner", + u"google-cloud-spanner Documentation", + author, + "google-cloud-spanner", + "GAPIC library for Google Cloud Spanner API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner/v1/docs/index.rst b/owl-bot-staging/spanner/v1/docs/index.rst new file mode 100644 index 0000000000..ee873ac9c1 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + spanner_v1/services + spanner_v1/types diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/services.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/services.rst new file mode 100644 index 0000000000..3bbbb55f79 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner v1 API +======================================== +.. 
toctree:: + :maxdepth: 2 + + spanner diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst new file mode 100644 index 0000000000..b51f4447e4 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst @@ -0,0 +1,10 @@ +Spanner +------------------------- + +.. automodule:: google.cloud.spanner_v1.services.spanner + :members: + :inherited-members: + +.. automodule:: google.cloud.spanner_v1.services.spanner.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/types.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/types.rst new file mode 100644 index 0000000000..8678aba188 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Spanner v1 API +===================================== + +.. automodule:: google.cloud.spanner_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py new file mode 100644 index 0000000000..6a24892b8b --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.spanner_v1.services.spanner.client import SpannerClient +from google.cloud.spanner_v1.services.spanner.async_client import SpannerAsyncClient + +from google.cloud.spanner_v1.types.commit_response import CommitResponse +from google.cloud.spanner_v1.types.keys import KeyRange +from google.cloud.spanner_v1.types.keys import KeySet +from google.cloud.spanner_v1.types.mutation import Mutation +from google.cloud.spanner_v1.types.query_plan import PlanNode +from google.cloud.spanner_v1.types.query_plan import QueryPlan +from google.cloud.spanner_v1.types.result_set import PartialResultSet +from google.cloud.spanner_v1.types.result_set import ResultSet +from google.cloud.spanner_v1.types.result_set import ResultSetMetadata +from google.cloud.spanner_v1.types.result_set import ResultSetStats +from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsRequest +from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsResponse +from google.cloud.spanner_v1.types.spanner import BeginTransactionRequest +from google.cloud.spanner_v1.types.spanner import CommitRequest +from google.cloud.spanner_v1.types.spanner import CreateSessionRequest +from google.cloud.spanner_v1.types.spanner import DeleteSessionRequest +from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlRequest +from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlResponse +from google.cloud.spanner_v1.types.spanner import ExecuteSqlRequest +from google.cloud.spanner_v1.types.spanner import GetSessionRequest +from google.cloud.spanner_v1.types.spanner import ListSessionsRequest +from google.cloud.spanner_v1.types.spanner import ListSessionsResponse +from google.cloud.spanner_v1.types.spanner import Partition +from google.cloud.spanner_v1.types.spanner import PartitionOptions +from google.cloud.spanner_v1.types.spanner import PartitionQueryRequest +from google.cloud.spanner_v1.types.spanner import PartitionReadRequest +from 
google.cloud.spanner_v1.types.spanner import PartitionResponse +from google.cloud.spanner_v1.types.spanner import ReadRequest +from google.cloud.spanner_v1.types.spanner import RequestOptions +from google.cloud.spanner_v1.types.spanner import RollbackRequest +from google.cloud.spanner_v1.types.spanner import Session +from google.cloud.spanner_v1.types.transaction import Transaction +from google.cloud.spanner_v1.types.transaction import TransactionOptions +from google.cloud.spanner_v1.types.transaction import TransactionSelector +from google.cloud.spanner_v1.types.type import StructType +from google.cloud.spanner_v1.types.type import Type +from google.cloud.spanner_v1.types.type import TypeCode + +__all__ = ('SpannerClient', + 'SpannerAsyncClient', + 'CommitResponse', + 'KeyRange', + 'KeySet', + 'Mutation', + 'PlanNode', + 'QueryPlan', + 'PartialResultSet', + 'ResultSet', + 'ResultSetMetadata', + 'ResultSetStats', + 'BatchCreateSessionsRequest', + 'BatchCreateSessionsResponse', + 'BeginTransactionRequest', + 'CommitRequest', + 'CreateSessionRequest', + 'DeleteSessionRequest', + 'ExecuteBatchDmlRequest', + 'ExecuteBatchDmlResponse', + 'ExecuteSqlRequest', + 'GetSessionRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'Partition', + 'PartitionOptions', + 'PartitionQueryRequest', + 'PartitionReadRequest', + 'PartitionResponse', + 'ReadRequest', + 'RequestOptions', + 'RollbackRequest', + 'Session', + 'Transaction', + 'TransactionOptions', + 'TransactionSelector', + 'StructType', + 'Type', + 'TypeCode', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed new file mode 100644 index 0000000000..0989eccd04 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner package uses inline types. 
diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py new file mode 100644 index 0000000000..16e8c9c6f1 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.spanner import SpannerClient +from .services.spanner import SpannerAsyncClient + +from .types.commit_response import CommitResponse +from .types.keys import KeyRange +from .types.keys import KeySet +from .types.mutation import Mutation +from .types.query_plan import PlanNode +from .types.query_plan import QueryPlan +from .types.result_set import PartialResultSet +from .types.result_set import ResultSet +from .types.result_set import ResultSetMetadata +from .types.result_set import ResultSetStats +from .types.spanner import BatchCreateSessionsRequest +from .types.spanner import BatchCreateSessionsResponse +from .types.spanner import BeginTransactionRequest +from .types.spanner import CommitRequest +from .types.spanner import CreateSessionRequest +from .types.spanner import DeleteSessionRequest +from .types.spanner import ExecuteBatchDmlRequest +from .types.spanner import ExecuteBatchDmlResponse +from .types.spanner import ExecuteSqlRequest +from .types.spanner import GetSessionRequest +from .types.spanner import ListSessionsRequest +from .types.spanner import 
ListSessionsResponse +from .types.spanner import Partition +from .types.spanner import PartitionOptions +from .types.spanner import PartitionQueryRequest +from .types.spanner import PartitionReadRequest +from .types.spanner import PartitionResponse +from .types.spanner import ReadRequest +from .types.spanner import RequestOptions +from .types.spanner import RollbackRequest +from .types.spanner import Session +from .types.transaction import Transaction +from .types.transaction import TransactionOptions +from .types.transaction import TransactionSelector +from .types.type import StructType +from .types.type import Type +from .types.type import TypeCode + +__all__ = ( + 'SpannerAsyncClient', +'BatchCreateSessionsRequest', +'BatchCreateSessionsResponse', +'BeginTransactionRequest', +'CommitRequest', +'CommitResponse', +'CreateSessionRequest', +'DeleteSessionRequest', +'ExecuteBatchDmlRequest', +'ExecuteBatchDmlResponse', +'ExecuteSqlRequest', +'GetSessionRequest', +'KeyRange', +'KeySet', +'ListSessionsRequest', +'ListSessionsResponse', +'Mutation', +'PartialResultSet', +'Partition', +'PartitionOptions', +'PartitionQueryRequest', +'PartitionReadRequest', +'PartitionResponse', +'PlanNode', +'QueryPlan', +'ReadRequest', +'RequestOptions', +'ResultSet', +'ResultSetMetadata', +'ResultSetStats', +'RollbackRequest', +'Session', +'SpannerClient', +'StructType', +'Transaction', +'TransactionOptions', +'TransactionSelector', +'Type', +'TypeCode', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json new file mode 100644 index 0000000000..a6b16725c3 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json @@ -0,0 +1,173 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.spanner_v1", + "protoPackage": "google.spanner.v1", + "schema": 
"1.0", + "services": { + "Spanner": { + "clients": { + "grpc": { + "libraryClient": "SpannerClient", + "rpcs": { + "BatchCreateSessions": { + "methods": [ + "batch_create_sessions" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "ExecuteBatchDml": { + "methods": [ + "execute_batch_dml" + ] + }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, + "ExecuteStreamingSql": { + "methods": [ + "execute_streaming_sql" + ] + }, + "GetSession": { + "methods": [ + "get_session" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "PartitionRead": { + "methods": [ + "partition_read" + ] + }, + "Read": { + "methods": [ + "read" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "StreamingRead": { + "methods": [ + "streaming_read" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SpannerAsyncClient", + "rpcs": { + "BatchCreateSessions": { + "methods": [ + "batch_create_sessions" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "ExecuteBatchDml": { + "methods": [ + "execute_batch_dml" + ] + }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, + "ExecuteStreamingSql": { + "methods": [ + "execute_streaming_sql" + ] + }, + "GetSession": { + "methods": [ + "get_session" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "PartitionRead": { + "methods": [ + "partition_read" + ] + }, + "Read": { + "methods": [ + "read" + ] + }, + "Rollback": { + "methods": [ + "rollback" 
+ ] + }, + "StreamingRead": { + "methods": [ + "streaming_read" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed new file mode 100644 index 0000000000..0989eccd04 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py new file mode 100644 index 0000000000..4de65971c2 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py new file mode 100644 index 0000000000..a0fb0c707d --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import SpannerClient +from .async_client import SpannerAsyncClient + +__all__ = ( + 'SpannerClient', + 'SpannerAsyncClient', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py new file mode 100644 index 0000000000..2f65c8b008 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py @@ -0,0 +1,1486 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +OptionalRetry = Union[retries.Retry, object] + +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport +from .client import SpannerClient + + +class SpannerAsyncClient: + """Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. 
+ """ + + _client: SpannerClient + + DEFAULT_ENDPOINT = SpannerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SpannerClient.DEFAULT_MTLS_ENDPOINT + + database_path = staticmethod(SpannerClient.database_path) + parse_database_path = staticmethod(SpannerClient.parse_database_path) + session_path = staticmethod(SpannerClient.session_path) + parse_session_path = staticmethod(SpannerClient.parse_session_path) + common_billing_account_path = staticmethod(SpannerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(SpannerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(SpannerClient.common_folder_path) + parse_common_folder_path = staticmethod(SpannerClient.parse_common_folder_path) + common_organization_path = staticmethod(SpannerClient.common_organization_path) + parse_common_organization_path = staticmethod(SpannerClient.parse_common_organization_path) + common_project_path = staticmethod(SpannerClient.common_project_path) + parse_common_project_path = staticmethod(SpannerClient.parse_common_project_path) + common_location_path = staticmethod(SpannerClient.common_location_path) + parse_common_location_path = staticmethod(SpannerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerAsyncClient: The constructed client. + """ + return SpannerClient.from_service_account_info.__func__(SpannerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            SpannerAsyncClient: The constructed client.
        """
        # Delegate to the sync client's classmethod implementation via
        # ``__func__``, rebinding ``cls`` to SpannerAsyncClient so the
        # constructed client is the async variant, not SpannerClient.
        return SpannerClient.from_service_account_file.__func__(SpannerAsyncClient, filename, *args, **kwargs)  # type: ignore

    # Alias kept for backward compatibility with earlier client surfaces.
    from_service_account_json = from_service_account_file
    async def create_session(self,
            request: Union[spanner.CreateSessionRequest, dict] = None,
            *,
            database: str = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> spanner.Session:
        r"""Creates a new session. A session can be used to perform
        transactions that read and/or modify data in a Cloud Spanner
        database. Sessions are meant to be reused for many consecutive
        transactions.

        Sessions can only execute one transaction at a time. To execute
        multiple concurrent read-write/write-only transactions, create
        multiple sessions. Note that standalone reads and queries use a
        transaction internally, and count toward the one transaction
        limit.

        Active sessions use additional server resources, so it is a good
        idea to delete idle and unneeded sessions. Aside from explicit
        deletes, Cloud Spanner may delete sessions for which no
        operations are sent for more than an hour. If a session is
        deleted, requests to it return ``NOT_FOUND``.

        Idle sessions can be kept alive by sending a trivial SQL query
        periodically, e.g., ``"SELECT 1"``.

        Args:
            request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]):
                The request object. The request for
                [CreateSession][google.spanner.v1.Spanner.CreateSession].
            database (:class:`str`):
                Required. The database in which the
                new session is created.

                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.spanner_v1.types.Session:
                A session in the Cloud Spanner API.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([database])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        # Coerce a dict (or copy a request message) into the proto-plus type.
        request = spanner.CreateSessionRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if database is not None:
            request.database = database

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. UNAVAILABLE errors are retried with
        # exponential backoff (0.25s initial, x1.3 growth, 32s cap) under a
        # 30-second overall deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_session,
            default_retry=retries.Retry(
                initial=0.25, maximum=32.0, multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=30.0,
            ),
            default_timeout=30.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here. The ``database`` resource name is sent as a
        # routing header so the backend can route the request.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("database", request.database),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def batch_create_sessions(self,
            request: Union[spanner.BatchCreateSessionsRequest, dict] = None,
            *,
            database: str = None,
            session_count: int = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> spanner.BatchCreateSessionsResponse:
        r"""Creates multiple new sessions.
        This API can be used to initialize a session cache on
        the clients. See https://goo.gl/TgSFN2 for best
        practices on session cache management.

        Args:
            request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]):
                The request object. The request for
                [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions].
            database (:class:`str`):
                Required. The database in which the
                new sessions are created.

                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            session_count (:class:`int`):
                Required. The number of sessions to be created in this
                batch call. The API may return fewer than the requested
                number of sessions. If a specific number of sessions are
                desired, the client can make additional calls to
                BatchCreateSessions (adjusting
                [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count]
                as necessary).

                This corresponds to the ``session_count`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.spanner_v1.types.BatchCreateSessionsResponse:
                The response for
                [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions].

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([database, session_count])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        # Coerce a dict (or copy a request message) into the proto-plus type.
        request = spanner.BatchCreateSessionsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if database is not None:
            request.database = database
        if session_count is not None:
            request.session_count = session_count

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. UNAVAILABLE errors are retried with
        # exponential backoff (0.25s initial, x1.3 growth, 32s cap) under a
        # 60-second overall deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.batch_create_sessions,
            default_retry=retries.Retry(
                initial=0.25, maximum=32.0, multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here. The ``database`` resource name is sent as a
        # routing header so the backend can route the request.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("database", request.database),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def get_session(self,
            request: Union[spanner.GetSessionRequest, dict] = None,
            *,
            name: str = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> spanner.Session:
        r"""Gets a session. Returns ``NOT_FOUND`` if the session does not
        exist. This is mainly useful for determining whether a session
        is still alive.

        Args:
            request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]):
                The request object. The request for
                [GetSession][google.spanner.v1.Spanner.GetSession].
            name (:class:`str`):
                Required. The name of the session to
                retrieve.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.spanner_v1.types.Session:
                A session in the Cloud Spanner API.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        # Coerce a dict (or copy a request message) into the proto-plus type.
        request = spanner.GetSessionRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. UNAVAILABLE errors are retried with
        # exponential backoff (0.25s initial, x1.3 growth, 32s cap) under a
        # 30-second overall deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_session,
            default_retry=retries.Retry(
                initial=0.25, maximum=32.0, multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=30.0,
            ),
            default_timeout=30.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here. The session ``name`` is sent as a routing header
        # so the backend can route the request.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def list_sessions(self,
            request: Union[spanner.ListSessionsRequest, dict] = None,
            *,
            database: str = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListSessionsAsyncPager:
        r"""Lists all sessions in a given database.

        Args:
            request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]):
                The request object. The request for
                [ListSessions][google.spanner.v1.Spanner.ListSessions].
            database (:class:`str`):
                Required. The database in which to
                list sessions.

                This corresponds to the ``database`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager:
                The response for
                [ListSessions][google.spanner.v1.Spanner.ListSessions].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([database])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        # Coerce a dict (or copy a request message) into the proto-plus type.
        request = spanner.ListSessionsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if database is not None:
            request.database = database

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. UNAVAILABLE errors are retried with
        # exponential backoff (0.25s initial, x1.3 growth, 32s cap) under a
        # 3600-second overall deadline (listing can page for a long time).
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_sessions,
            default_retry=retries.Retry(
                initial=0.25, maximum=32.0, multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=3600.0,
            ),
            default_timeout=3600.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here. The ``database`` resource name is sent as a
        # routing header so the backend can route the request.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("database", request.database),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method. The pager re-invokes ``rpc`` with
        # the same request/metadata to fetch subsequent pages lazily.
        response = pagers.ListSessionsAsyncPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ return response + + async def delete_session(self, + request: Union[spanner.DeleteSessionRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Ends a session, releasing server resources associated + with it. This will asynchronously trigger cancellation + of any operations that are running with this session. + + Args: + request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): + The request object. The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + name (:class:`str`): + Required. The name of the session to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner.DeleteSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_session, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def execute_sql(self, + request: Union[spanner.ExecuteSqlRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> result_set.ResultSet: + r"""Executes an SQL statement, returning all results in a single + reply. This method cannot be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + """ + # Create or coerce a protobuf request object. + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.execute_sql, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def execute_streaming_sql(self, + request: Union[spanner.ExecuteSqlRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: + r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except + returns the result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no + limit on the size of the returned result set. However, no + individual row in the result set can exceed 100 MiB, and no + column value can exceed 10 MiB. + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): + The request object. 
The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.execute_streaming_sql, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def execute_batch_dml(self, + request: Union[spanner.ExecuteBatchDmlRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.ExecuteBatchDmlResponse: + r"""Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. 
The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): + The request object. The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: + The response for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. Contains a list + of [ResultSet][google.spanner.v1.ResultSet] messages, + one for each DML statement that has successfully + executed, in the same order as the statements in the + request. If a statement fails, the status in the + response body identifies the cause of the failure. + + To check for DML statements that failed, use the + following approach: + + 1. Check the status in the response message. The + [google.rpc.Code][google.rpc.Code] enum value OK + indicates that all statements were executed + successfully. + 2. If the status was not OK, check the number of + result sets in the response. If the response + contains N + [ResultSet][google.spanner.v1.ResultSet] messages, + then statement N+1 in the request failed. + + Example 1: + + - Request: 5 DML statements, all executed + successfully. + - Response: 5 + [ResultSet][google.spanner.v1.ResultSet] messages, + with the status OK. + + Example 2: + + - Request: 5 DML statements. The third statement has + a syntax error. 
+ - Response: 2 + [ResultSet][google.spanner.v1.ResultSet] messages, + and a syntax error (INVALID_ARGUMENT) status. The + number of [ResultSet][google.spanner.v1.ResultSet] + messages indicates that the third statement + failed, and the fourth and fifth statements were + not executed. + + """ + # Create or coerce a protobuf request object. + request = spanner.ExecuteBatchDmlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.execute_batch_dml, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def read(self, + request: Union[spanner.ReadRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> result_set.ResultSet: + r"""Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method + cannot be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. 
+ + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + Args: + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): + The request object. The request for + [Read][google.spanner.v1.Spanner.Read] and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + """ + # Create or coerce a protobuf request object. + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.read, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def streaming_read(self, + request: Union[spanner.ReadRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: + r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + Args: + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): + The request object. The request for + [Read][google.spanner.v1.Spanner.Read] and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.streaming_read, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def begin_transaction(self, + request: Union[spanner.BeginTransactionRequest, dict] = None, + *, + session: str = None, + options: transaction.TransactionOptions = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transaction.Transaction: + r"""Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + Args: + request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): + The request object. The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + session (:class:`str`): + Required. The session in which the + transaction runs. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + options (:class:`google.cloud.spanner_v1.types.TransactionOptions`): + Required. Options for the new + transaction. + + This corresponds to the ``options`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.Transaction: + A transaction. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([session, options]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner.BeginTransactionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if options is not None: + request.options = options + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.begin_transaction, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def commit(self, + request: Union[spanner.CommitRequest, dict] = None, + *, + session: str = None, + transaction_id: bytes = None, + mutations: Sequence[mutation.Mutation] = None, + single_use_transaction: transaction.TransactionOptions = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> commit_response.CommitResponse: + r"""Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. 
However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + re-attempt the transaction from the beginning, re-using the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + Args: + request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): + The request object. The request for + [Commit][google.spanner.v1.Spanner.Commit]. + session (:class:`str`): + Required. The session in which the + transaction to be committed is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (:class:`bytes`): + Commit a previously-started + transaction. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutations (:class:`Sequence[google.cloud.spanner_v1.types.Mutation]`): + The mutations to be executed when + this transaction commits. All mutations + are applied atomically, in the order + they appear in this list. + + This corresponds to the ``mutations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + single_use_transaction (:class:`google.cloud.spanner_v1.types.TransactionOptions`): + Execute mutations in a temporary transaction. Note that + unlike commit of a previously-started transaction, + commit with a temporary transaction is non-idempotent. + That is, if the ``CommitRequest`` is sent to Cloud + Spanner more than once (for instance, due to retries in + the application, or in the transport library), it is + possible that the mutations are executed more than once. 
+ If this is undesirable, use + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] + and [Commit][google.spanner.v1.Spanner.Commit] instead. + + This corresponds to the ``single_use_transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.CommitResponse: + The response for + [Commit][google.spanner.v1.Spanner.Commit]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session, transaction_id, mutations, single_use_transaction]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner.CommitRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + if single_use_transaction is not None: + request.single_use_transaction = single_use_transaction + if mutations: + request.mutations.extend(mutations) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.commit, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def rollback(self, + request: Union[spanner.RollbackRequest, dict] = None, + *, + session: str = None, + transaction_id: bytes = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Rolls back a transaction, releasing any locks it holds. It is a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction is not found. ``Rollback`` never returns + ``ABORTED``. + + Args: + request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): + The request object. The request for + [Rollback][google.spanner.v1.Spanner.Rollback]. + session (:class:`str`): + Required. The session in which the + transaction to roll back is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (:class:`bytes`): + Required. The transaction to roll + back. 
+ + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session, transaction_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner.RollbackRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def partition_query(self, + request: Union[spanner.PartitionQueryRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + query operation in parallel. Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + PartitionQueryRequest used to create the partition tokens and + the ExecuteSqlRequests that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it is not possible + to resume the query, and the whole operation must be restarted + from the beginning. + + Args: + request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): + The request object. The request for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + request = spanner.PartitionQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.partition_query, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def partition_read(self, + request: Union[spanner.PartitionReadRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + PartitionReadRequest used to create the partition tokens and the + ReadRequests that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual StreamingRead + call issued with a partition_token. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it is not possible + to resume the read, and the whole operation must be restarted + from the beginning. + + Args: + request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): + The request object. 
The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + request = spanner.PartitionReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.partition_read, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-spanner", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "SpannerAsyncClient", +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py new file mode 100644 index 0000000000..ca138e5b6c --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py @@ -0,0 +1,1621 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Dict, Optional, Iterable, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +OptionalRetry = Union[retries.Retry, object] + +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import SpannerGrpcTransport +from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport + + +class SpannerClientMeta(type): + """Metaclass for the Spanner client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] + _transport_registry["grpc"] = SpannerGrpcTransport + _transport_registry["grpc_asyncio"] = SpannerGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[SpannerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SpannerClient(metaclass=SpannerClientMeta): + """Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "spanner.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SpannerTransport: + """Returns the transport used by the client instance. + + Returns: + SpannerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def database_path(project: str,instance: str,database: str,) -> str: + """Returns a fully-qualified database string.""" + return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + + @staticmethod + def parse_database_path(path: str) -> Dict[str,str]: + """Parses a database path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def session_path(project: str,instance: str,database: str,session: str,) -> str: + """Returns a fully-qualified session string.""" + return "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) + + @staticmethod + def parse_session_path(path: str) -> Dict[str,str]: + """Parses a session path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)/sessions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + 
@staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SpannerTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the spanner client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SpannerTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SpannerTransport): + # transport is a SpannerTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + def create_session(self, + request: Union[spanner.CreateSessionRequest, dict] = None, + *, + database: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.Session: + r"""Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it is a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner may delete sessions for which no + operations are sent for more than an hour. If a session is + deleted, requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, e.g., ``"SELECT 1"``. + + Args: + request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): + The request object. The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + database (str): + Required. The database in which the + new session is created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.CreateSessionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.CreateSessionRequest): + request = spanner.CreateSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def batch_create_sessions(self, + request: Union[spanner.BatchCreateSessionsRequest, dict] = None, + *, + database: str = None, + session_count: int = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.BatchCreateSessionsResponse: + r"""Creates multiple new sessions. + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + Args: + request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): + The request object. The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + database (str): + Required. The database in which the + new sessions are created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session_count (int): + Required. The number of sessions to be created in this + batch call. The API may return fewer than the requested + number of sessions. If a specific number of sessions are + desired, the client can make additional calls to + BatchCreateSessions (adjusting + [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] + as necessary). + + This corresponds to the ``session_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.BatchCreateSessionsResponse: + The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database, session_count]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.BatchCreateSessionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.BatchCreateSessionsRequest): + request = spanner.BatchCreateSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if session_count is not None: + request.session_count = session_count + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_session(self, + request: Union[spanner.GetSessionRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.Session: + r"""Gets a session. Returns ``NOT_FOUND`` if the session does not + exist. This is mainly useful for determining whether a session + is still alive. + + Args: + request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): + The request object. 
The request for + [GetSession][google.spanner.v1.Spanner.GetSession]. + name (str): + Required. The name of the session to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.GetSessionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.GetSessionRequest): + request = spanner.GetSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_sessions(self, + request: Union[spanner.ListSessionsRequest, dict] = None, + *, + database: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionsPager: + r"""Lists all sessions in a given database. + + Args: + request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): + The request object. The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + database (str): + Required. The database in which to + list sessions. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager: + The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.ListSessionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, spanner.ListSessionsRequest): + request = spanner.ListSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSessionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_session(self, + request: Union[spanner.DeleteSessionRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Ends a session, releasing server resources associated + with it. This will asynchronously trigger cancellation + of any operations that are running with this session. + + Args: + request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): + The request object. The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + name (str): + Required. The name of the session to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.DeleteSessionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.DeleteSessionRequest): + request = spanner.DeleteSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def execute_sql(self, + request: Union[spanner.ExecuteSqlRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> result_set.ResultSet: + r"""Executes an SQL statement, returning all results in a single + reply. This method cannot be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. 
If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a spanner.ExecuteSqlRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.ExecuteSqlRequest): + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def execute_streaming_sql(self, + request: Union[spanner.ExecuteSqlRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[result_set.PartialResultSet]: + r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except + returns the result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no + limit on the size of the returned result set. However, no + individual row in the result set can exceed 100 MiB, and no + column value can exceed 10 MiB. + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a spanner.ExecuteSqlRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.ExecuteSqlRequest): + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.execute_streaming_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def execute_batch_dml(self, + request: Union[spanner.ExecuteBatchDmlRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.ExecuteBatchDmlResponse: + r"""Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): + The request object. The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: + The response for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. 
Contains a list + of [ResultSet][google.spanner.v1.ResultSet] messages, + one for each DML statement that has successfully + executed, in the same order as the statements in the + request. If a statement fails, the status in the + response body identifies the cause of the failure. + + To check for DML statements that failed, use the + following approach: + + 1. Check the status in the response message. The + [google.rpc.Code][google.rpc.Code] enum value OK + indicates that all statements were executed + successfully. + 2. If the status was not OK, check the number of + result sets in the response. If the response + contains N + [ResultSet][google.spanner.v1.ResultSet] messages, + then statement N+1 in the request failed. + + Example 1: + + - Request: 5 DML statements, all executed + successfully. + - Response: 5 + [ResultSet][google.spanner.v1.ResultSet] messages, + with the status OK. + + Example 2: + + - Request: 5 DML statements. The third statement has + a syntax error. + - Response: 2 + [ResultSet][google.spanner.v1.ResultSet] messages, + and a syntax error (INVALID_ARGUMENT) status. The + number of [ResultSet][google.spanner.v1.ResultSet] + messages indicates that the third statement + failed, and the fourth and fifth statements were + not executed. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a spanner.ExecuteBatchDmlRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.ExecuteBatchDmlRequest): + request = spanner.ExecuteBatchDmlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_batch_dml] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def read(self, + request: Union[spanner.ReadRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> result_set.ResultSet: + r"""Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method + cannot be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + Args: + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): + The request object. The request for + [Read][google.spanner.v1.Spanner.Read] and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a spanner.ReadRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def streaming_read(self, + request: Union[spanner.ReadRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[result_set.PartialResultSet]: + r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + Args: + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): + The request object. The request for + [Read][google.spanner.v1.Spanner.Read] and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. 
Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a spanner.ReadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def begin_transaction(self, + request: Union[spanner.BeginTransactionRequest, dict] = None, + *, + session: str = None, + options: transaction.TransactionOptions = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transaction.Transaction: + r"""Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + Args: + request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): + The request object. The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + session (str): + Required. The session in which the + transaction runs. 
+ + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + options (google.cloud.spanner_v1.types.TransactionOptions): + Required. Options for the new + transaction. + + This corresponds to the ``options`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.Transaction: + A transaction. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session, options]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.BeginTransactionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.BeginTransactionRequest): + request = spanner.BeginTransactionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if options is not None: + request.options = options + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.begin_transaction] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def commit(self, + request: Union[spanner.CommitRequest, dict] = None, + *, + session: str = None, + transaction_id: bytes = None, + mutations: Sequence[mutation.Mutation] = None, + single_use_transaction: transaction.TransactionOptions = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> commit_response.CommitResponse: + r"""Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + re-attempt the transaction from the beginning, re-using the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + Args: + request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): + The request object. The request for + [Commit][google.spanner.v1.Spanner.Commit]. + session (str): + Required. The session in which the + transaction to be committed is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (bytes): + Commit a previously-started + transaction. 
+ + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutations (Sequence[google.cloud.spanner_v1.types.Mutation]): + The mutations to be executed when + this transaction commits. All mutations + are applied atomically, in the order + they appear in this list. + + This corresponds to the ``mutations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): + Execute mutations in a temporary transaction. Note that + unlike commit of a previously-started transaction, + commit with a temporary transaction is non-idempotent. + That is, if the ``CommitRequest`` is sent to Cloud + Spanner more than once (for instance, due to retries in + the application, or in the transport library), it is + possible that the mutations are executed more than once. + If this is undesirable, use + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] + and [Commit][google.spanner.v1.Spanner.Commit] instead. + + This corresponds to the ``single_use_transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.CommitResponse: + The response for + [Commit][google.spanner.v1.Spanner.Commit]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([session, transaction_id, mutations, single_use_transaction]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.CommitRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.CommitRequest): + request = spanner.CommitRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + if mutations is not None: + request.mutations = mutations + if single_use_transaction is not None: + request.single_use_transaction = single_use_transaction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def rollback(self, + request: Union[spanner.RollbackRequest, dict] = None, + *, + session: str = None, + transaction_id: bytes = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Rolls back a transaction, releasing any locks it holds. 
It is a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction is not found. ``Rollback`` never returns + ``ABORTED``. + + Args: + request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): + The request object. The request for + [Rollback][google.spanner.v1.Spanner.Rollback]. + session (str): + Required. The session in which the + transaction to roll back is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (bytes): + Required. The transaction to roll + back. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session, transaction_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.RollbackRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, spanner.RollbackRequest): + request = spanner.RollbackRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def partition_query(self, + request: Union[spanner.PartitionQueryRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + query operation in parallel. Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + PartitionQueryRequest used to create the partition tokens and + the ExecuteSqlRequests that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it is not possible + to resume the query, and the whole operation must be restarted + from the beginning. + + Args: + request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): + The request object. 
The request for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a spanner.PartitionQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.PartitionQueryRequest): + request = spanner.PartitionQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.partition_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def partition_read(self, + request: Union[spanner.PartitionReadRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. 
The same session + and read-only transaction must be used by the + PartitionReadRequest used to create the partition tokens and the + ReadRequests that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual StreamingRead + call issued with a partition_token. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it is not possible + to resume the read, and the whole operation must be restarted + from the beginning. + + Args: + request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): + The request object. The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a spanner.PartitionReadRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.PartitionReadRequest): + request = spanner.PartitionReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.partition_read] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-spanner", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "SpannerClient", +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py new file mode 100644 index 0000000000..20b368c231 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.spanner_v1.types import spanner + + +class ListSessionsPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner.ListSessionsResponse], + request: spanner.ListSessionsRequest, + response: spanner.ListSessionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.spanner_v1.types.ListSessionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = spanner.ListSessionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner.Session]: + for page in self.pages: + yield from page.sessions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSessionsAsyncPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner.ListSessionsResponse]], + request: spanner.ListSessionsRequest, + response: spanner.ListSessionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.spanner_v1.types.ListSessionsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = spanner.ListSessionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[spanner.Session]: + async def async_generator(): + async for page in self.pages: + for response in page.sessions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py new file mode 100644 index 0000000000..de8947cdb7 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SpannerTransport +from .grpc import SpannerGrpcTransport +from .grpc_asyncio import SpannerGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] +_transport_registry['grpc'] = SpannerGrpcTransport +_transport_registry['grpc_asyncio'] = SpannerGrpcAsyncIOTransport + +__all__ = ( + 'SpannerTransport', + 'SpannerGrpcTransport', + 'SpannerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py new file mode 100644 index 0000000000..35b39925d1 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -0,0 +1,425 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-spanner', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class SpannerTransport(abc.ABC): + """Abstract transport class for Spanner.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_session: gapic_v1.method.wrap_method( + self.create_session, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_create_sessions: gapic_v1.method.wrap_method( + self.batch_create_sessions, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_session: gapic_v1.method.wrap_method( + self.get_session, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_sessions: gapic_v1.method.wrap_method( + self.list_sessions, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_session: gapic_v1.method.wrap_method( + self.delete_session, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_sql: gapic_v1.method.wrap_method( + 
self.execute_sql, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_streaming_sql: gapic_v1.method.wrap_method( + self.execute_streaming_sql, + default_timeout=3600.0, + client_info=client_info, + ), + self.execute_batch_dml: gapic_v1.method.wrap_method( + self.execute_batch_dml, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.read: gapic_v1.method.wrap_method( + self.read, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.streaming_read: gapic_v1.method.wrap_method( + self.streaming_read, + default_timeout=3600.0, + client_info=client_info, + ), + self.begin_transaction: gapic_v1.method.wrap_method( + self.begin_transaction, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.commit: gapic_v1.method.wrap_method( + self.commit, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.rollback: gapic_v1.method.wrap_method( + self.rollback, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + 
default_timeout=30.0, + client_info=client_info, + ), + self.partition_query: gapic_v1.method.wrap_method( + self.partition_query, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_read: gapic_v1.method.wrap_method( + self.partition_read, + default_retry=retries.Retry( +initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + Union[ + spanner.Session, + Awaitable[spanner.Session] + ]]: + raise NotImplementedError() + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + Union[ + spanner.BatchCreateSessionsResponse, + Awaitable[spanner.BatchCreateSessionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + Union[ + spanner.Session, + Awaitable[spanner.Session] + ]]: + raise NotImplementedError() + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + Union[ + spanner.ListSessionsResponse, + Awaitable[spanner.ListSessionsResponse] + ]]: + raise NotImplementedError() + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Union[ + 
result_set.ResultSet, + Awaitable[result_set.ResultSet] + ]]: + raise NotImplementedError() + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Union[ + result_set.PartialResultSet, + Awaitable[result_set.PartialResultSet] + ]]: + raise NotImplementedError() + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + Union[ + spanner.ExecuteBatchDmlResponse, + Awaitable[spanner.ExecuteBatchDmlResponse] + ]]: + raise NotImplementedError() + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + Union[ + result_set.ResultSet, + Awaitable[result_set.ResultSet] + ]]: + raise NotImplementedError() + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + Union[ + result_set.PartialResultSet, + Awaitable[result_set.PartialResultSet] + ]]: + raise NotImplementedError() + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + Union[ + transaction.Transaction, + Awaitable[transaction.Transaction] + ]]: + raise NotImplementedError() + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + Union[ + commit_response.CommitResponse, + Awaitable[commit_response.CommitResponse] + ]]: + raise NotImplementedError() + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + Union[ + spanner.PartitionResponse, + Awaitable[spanner.PartitionResponse] + ]]: + raise NotImplementedError() + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + Union[ + spanner.PartitionResponse, + Awaitable[spanner.PartitionResponse] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'SpannerTransport', +) diff --git 
a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py new file mode 100644 index 0000000000..c3a57b8481 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -0,0 +1,756 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore +from .base import SpannerTransport, DEFAULT_CLIENT_INFO + + +class SpannerGrpcTransport(SpannerTransport): + """gRPC backend transport for Spanner. + + Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + spanner.Session]: + r"""Return a callable for the create session method over gRPC. + + Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. 
Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it is a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner may delete sessions for which no + operations are sent for more than an hour. If a session is + deleted, requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, e.g., ``"SELECT 1"``. + + Returns: + Callable[[~.CreateSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_session' not in self._stubs: + self._stubs['create_session'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/CreateSession', + request_serializer=spanner.CreateSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['create_session'] + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + spanner.BatchCreateSessionsResponse]: + r"""Return a callable for the batch create sessions method over gRPC. + + Creates multiple new sessions. + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + Returns: + Callable[[~.BatchCreateSessionsRequest], + ~.BatchCreateSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'batch_create_sessions' not in self._stubs: + self._stubs['batch_create_sessions'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/BatchCreateSessions', + request_serializer=spanner.BatchCreateSessionsRequest.serialize, + response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, + ) + return self._stubs['batch_create_sessions'] + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + spanner.Session]: + r"""Return a callable for the get session method over gRPC. + + Gets a session. Returns ``NOT_FOUND`` if the session does not + exist. This is mainly useful for determining whether a session + is still alive. + + Returns: + Callable[[~.GetSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_session' not in self._stubs: + self._stubs['get_session'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/GetSession', + request_serializer=spanner.GetSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['get_session'] + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + spanner.ListSessionsResponse]: + r"""Return a callable for the list sessions method over gRPC. + + Lists all sessions in a given database. + + Returns: + Callable[[~.ListSessionsRequest], + ~.ListSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/ListSessions', + request_serializer=spanner.ListSessionsRequest.serialize, + response_deserializer=spanner.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete session method over gRPC. + + Ends a session, releasing server resources associated + with it. This will asynchronously trigger cancellation + of any operations that are running with this session. + + Returns: + Callable[[~.DeleteSessionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_session' not in self._stubs: + self._stubs['delete_session'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/DeleteSession', + request_serializer=spanner.DeleteSessionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_session'] + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.ResultSet]: + r"""Return a callable for the execute sql method over gRPC. + + Executes an SQL statement, returning all results in a single + reply. This method cannot be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. 
+ + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + Returns: + Callable[[~.ExecuteSqlRequest], + ~.ResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'execute_sql' not in self._stubs: + self._stubs['execute_sql'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['execute_sql'] + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.PartialResultSet]: + r"""Return a callable for the execute streaming sql method over gRPC. + + Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except + returns the result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no + limit on the size of the returned result set. However, no + individual row in the result set can exceed 100 MiB, and no + column value can exceed 10 MiB. + + Returns: + Callable[[~.ExecuteSqlRequest], + ~.PartialResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'execute_streaming_sql' not in self._stubs: + self._stubs['execute_streaming_sql'] = self.grpc_channel.unary_stream( + '/google.spanner.v1.Spanner/ExecuteStreamingSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['execute_streaming_sql'] + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + spanner.ExecuteBatchDmlResponse]: + r"""Return a callable for the execute batch dml method over gRPC. + + Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + Returns: + Callable[[~.ExecuteBatchDmlRequest], + ~.ExecuteBatchDmlResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'execute_batch_dml' not in self._stubs: + self._stubs['execute_batch_dml'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteBatchDml', + request_serializer=spanner.ExecuteBatchDmlRequest.serialize, + response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, + ) + return self._stubs['execute_batch_dml'] + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + result_set.ResultSet]: + r"""Return a callable for the read method over gRPC. + + Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method + cannot be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + Returns: + Callable[[~.ReadRequest], + ~.ResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'read' not in self._stubs: + self._stubs['read'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/Read', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['read'] + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + result_set.PartialResultSet]: + r"""Return a callable for the streaming read method over gRPC. 
+ + Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + Returns: + Callable[[~.ReadRequest], + ~.PartialResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'streaming_read' not in self._stubs: + self._stubs['streaming_read'] = self.grpc_channel.unary_stream( + '/google.spanner.v1.Spanner/StreamingRead', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['streaming_read'] + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + transaction.Transaction]: + r"""Return a callable for the begin transaction method over gRPC. + + Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + Returns: + Callable[[~.BeginTransactionRequest], + ~.Transaction]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'begin_transaction' not in self._stubs: + self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/BeginTransaction', + request_serializer=spanner.BeginTransactionRequest.serialize, + response_deserializer=transaction.Transaction.deserialize, + ) + return self._stubs['begin_transaction'] + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + commit_response.CommitResponse]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + re-attempt the transaction from the beginning, re-using the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + Returns: + Callable[[~.CommitRequest], + ~.CommitResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'commit' not in self._stubs: + self._stubs['commit'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/Commit', + request_serializer=spanner.CommitRequest.serialize, + response_deserializer=commit_response.CommitResponse.deserialize, + ) + return self._stubs['commit'] + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + empty_pb2.Empty]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction, releasing any locks it holds. It is a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction is not found. ``Rollback`` never returns + ``ABORTED``. + + Returns: + Callable[[~.RollbackRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback' not in self._stubs: + self._stubs['rollback'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/Rollback', + request_serializer=spanner.RollbackRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['rollback'] + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + spanner.PartitionResponse]: + r"""Return a callable for the partition query method over gRPC. + + Creates a set of partition tokens that can be used to execute a + query operation in parallel. Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. 
The same + session and read-only transaction must be used by the + PartitionQueryRequest used to create the partition tokens and + the ExecuteSqlRequests that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it is not possible + to resume the query, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionQueryRequest], + ~.PartitionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_query' not in self._stubs: + self._stubs['partition_query'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionQuery', + request_serializer=spanner.PartitionQueryRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_query'] + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + spanner.PartitionResponse]: + r"""Return a callable for the partition read method over gRPC. + + Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + PartitionReadRequest used to create the partition tokens and the + ReadRequests that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual StreamingRead + call issued with a partition_token. 
+ + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it is not possible + to resume the read, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionReadRequest], + ~.PartitionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_read' not in self._stubs: + self._stubs['partition_read'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionRead', + request_serializer=spanner.PartitionReadRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_read'] + + def close(self): + self.grpc_channel.close() + +__all__ = ( + 'SpannerGrpcTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py new file mode 100644 index 0000000000..5935e8fe10 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -0,0 +1,760 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore +from .base import SpannerTransport, DEFAULT_CLIENT_INFO +from .grpc import SpannerGrpcTransport + + +class SpannerGrpcAsyncIOTransport(SpannerTransport): + """gRPC AsyncIO backend transport for Spanner. + + Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + Awaitable[spanner.Session]]: + r"""Return a callable for the create session method over gRPC. + + Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. 
To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it is a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner may delete sessions for which no + operations are sent for more than an hour. If a session is + deleted, requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, e.g., ``"SELECT 1"``. + + Returns: + Callable[[~.CreateSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_session' not in self._stubs: + self._stubs['create_session'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/CreateSession', + request_serializer=spanner.CreateSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['create_session'] + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + Awaitable[spanner.BatchCreateSessionsResponse]]: + r"""Return a callable for the batch create sessions method over gRPC. + + Creates multiple new sessions. + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + Returns: + Callable[[~.BatchCreateSessionsRequest], + Awaitable[~.BatchCreateSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_create_sessions' not in self._stubs: + self._stubs['batch_create_sessions'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/BatchCreateSessions', + request_serializer=spanner.BatchCreateSessionsRequest.serialize, + response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, + ) + return self._stubs['batch_create_sessions'] + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + Awaitable[spanner.Session]]: + r"""Return a callable for the get session method over gRPC. + + Gets a session. Returns ``NOT_FOUND`` if the session does not + exist. This is mainly useful for determining whether a session + is still alive. + + Returns: + Callable[[~.GetSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_session' not in self._stubs: + self._stubs['get_session'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/GetSession', + request_serializer=spanner.GetSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['get_session'] + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + Awaitable[spanner.ListSessionsResponse]]: + r"""Return a callable for the list sessions method over gRPC. + + Lists all sessions in a given database. + + Returns: + Callable[[~.ListSessionsRequest], + Awaitable[~.ListSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/ListSessions', + request_serializer=spanner.ListSessionsRequest.serialize, + response_deserializer=spanner.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete session method over gRPC. + + Ends a session, releasing server resources associated + with it. This will asynchronously trigger cancellation + of any operations that are running with this session. + + Returns: + Callable[[~.DeleteSessionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_session' not in self._stubs: + self._stubs['delete_session'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/DeleteSession', + request_serializer=spanner.DeleteSessionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_session'] + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Awaitable[result_set.ResultSet]]: + r"""Return a callable for the execute sql method over gRPC. + + Executes an SQL statement, returning all results in a single + reply. This method cannot be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. 
If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + Returns: + Callable[[~.ExecuteSqlRequest], + Awaitable[~.ResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'execute_sql' not in self._stubs: + self._stubs['execute_sql'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['execute_sql'] + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Awaitable[result_set.PartialResultSet]]: + r"""Return a callable for the execute streaming sql method over gRPC. + + Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except + returns the result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no + limit on the size of the returned result set. However, no + individual row in the result set can exceed 100 MiB, and no + column value can exceed 10 MiB. + + Returns: + Callable[[~.ExecuteSqlRequest], + Awaitable[~.PartialResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'execute_streaming_sql' not in self._stubs: + self._stubs['execute_streaming_sql'] = self.grpc_channel.unary_stream( + '/google.spanner.v1.Spanner/ExecuteStreamingSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['execute_streaming_sql'] + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + Awaitable[spanner.ExecuteBatchDmlResponse]]: + r"""Return a callable for the execute batch dml method over gRPC. + + Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + Returns: + Callable[[~.ExecuteBatchDmlRequest], + Awaitable[~.ExecuteBatchDmlResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'execute_batch_dml' not in self._stubs: + self._stubs['execute_batch_dml'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteBatchDml', + request_serializer=spanner.ExecuteBatchDmlRequest.serialize, + response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, + ) + return self._stubs['execute_batch_dml'] + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + Awaitable[result_set.ResultSet]]: + r"""Return a callable for the read method over gRPC. + + Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method + cannot be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + Returns: + Callable[[~.ReadRequest], + Awaitable[~.ResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'read' not in self._stubs: + self._stubs['read'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/Read', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['read'] + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + Awaitable[result_set.PartialResultSet]]: + r"""Return a callable for the streaming read method over gRPC. 
+ + Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + Returns: + Callable[[~.ReadRequest], + Awaitable[~.PartialResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'streaming_read' not in self._stubs: + self._stubs['streaming_read'] = self.grpc_channel.unary_stream( + '/google.spanner.v1.Spanner/StreamingRead', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['streaming_read'] + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + Awaitable[transaction.Transaction]]: + r"""Return a callable for the begin transaction method over gRPC. + + Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + Returns: + Callable[[~.BeginTransactionRequest], + Awaitable[~.Transaction]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'begin_transaction' not in self._stubs: + self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/BeginTransaction', + request_serializer=spanner.BeginTransactionRequest.serialize, + response_deserializer=transaction.Transaction.deserialize, + ) + return self._stubs['begin_transaction'] + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + Awaitable[commit_response.CommitResponse]]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + re-attempt the transaction from the beginning, re-using the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + Returns: + Callable[[~.CommitRequest], + Awaitable[~.CommitResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'commit' not in self._stubs: + self._stubs['commit'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/Commit', + request_serializer=spanner.CommitRequest.serialize, + response_deserializer=commit_response.CommitResponse.deserialize, + ) + return self._stubs['commit'] + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction, releasing any locks it holds. It is a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction is not found. ``Rollback`` never returns + ``ABORTED``. + + Returns: + Callable[[~.RollbackRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback' not in self._stubs: + self._stubs['rollback'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/Rollback', + request_serializer=spanner.RollbackRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['rollback'] + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + Awaitable[spanner.PartitionResponse]]: + r"""Return a callable for the partition query method over gRPC. + + Creates a set of partition tokens that can be used to execute a + query operation in parallel. 
Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + PartitionQueryRequest used to create the partition tokens and + the ExecuteSqlRequests that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it is not possible + to resume the query, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionQueryRequest], + Awaitable[~.PartitionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_query' not in self._stubs: + self._stubs['partition_query'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionQuery', + request_serializer=spanner.PartitionQueryRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_query'] + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + Awaitable[spanner.PartitionResponse]]: + r"""Return a callable for the partition read method over gRPC. + + Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + PartitionReadRequest used to create the partition tokens and the + ReadRequests that use the partition tokens. 
There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual StreamingRead + call issued with a partition_token. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it is not possible + to resume the read, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionReadRequest], + Awaitable[~.PartitionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_read' not in self._stubs: + self._stubs['partition_read'] = self.grpc_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionRead', + request_serializer=spanner.PartitionReadRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_read'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'SpannerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py new file mode 100644 index 0000000000..6a50fe1791 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
from .commit_response import CommitResponse
from .keys import KeyRange, KeySet
from .mutation import Mutation
from .query_plan import PlanNode, QueryPlan
from .result_set import (
    PartialResultSet,
    ResultSet,
    ResultSetMetadata,
    ResultSetStats,
)
from .spanner import (
    BatchCreateSessionsRequest,
    BatchCreateSessionsResponse,
    BeginTransactionRequest,
    CommitRequest,
    CreateSessionRequest,
    DeleteSessionRequest,
    ExecuteBatchDmlRequest,
    ExecuteBatchDmlResponse,
    ExecuteSqlRequest,
    GetSessionRequest,
    ListSessionsRequest,
    ListSessionsResponse,
    Partition,
    PartitionOptions,
    PartitionQueryRequest,
    PartitionReadRequest,
    PartitionResponse,
    ReadRequest,
    RequestOptions,
    RollbackRequest,
    Session,
)
from .transaction import Transaction, TransactionOptions, TransactionSelector
from .type import StructType, Type, TypeCode

# Public surface of google.cloud.spanner_v1.types, listed in the same
# per-module order as the imports above.
__all__ = (
    'CommitResponse',
    'KeyRange',
    'KeySet',
    'Mutation',
    'PlanNode',
    'QueryPlan',
    'PartialResultSet',
    'ResultSet',
    'ResultSetMetadata',
    'ResultSetStats',
    'BatchCreateSessionsRequest',
    'BatchCreateSessionsResponse',
    'BeginTransactionRequest',
    'CommitRequest',
    'CreateSessionRequest',
    'DeleteSessionRequest',
    'ExecuteBatchDmlRequest',
    'ExecuteBatchDmlResponse',
    'ExecuteSqlRequest',
    'GetSessionRequest',
    'ListSessionsRequest',
    'ListSessionsResponse',
    'Partition',
    'PartitionOptions',
    'PartitionQueryRequest',
    'PartitionReadRequest',
    'PartitionResponse',
    'ReadRequest',
    'RequestOptions',
    'RollbackRequest',
    'Session',
    'Transaction',
    'TransactionOptions',
    'TransactionSelector',
    'StructType',
    'Type',
    'TypeCode',
)
class CommitResponse(proto.Message):
    r"""The response for [Commit][google.spanner.v1.Spanner.Commit].

    Attributes:
        commit_timestamp (google.protobuf.timestamp_pb2.Timestamp):
            The Cloud Spanner timestamp at which the transaction
            committed.
        commit_stats (google.cloud.spanner_v1.types.CommitResponse.CommitStats):
            The statistics about this Commit. Not returned by default.
            For more information, see
            [CommitRequest.return_commit_stats][google.spanner.v1.CommitRequest.return_commit_stats].
    """

    class CommitStats(proto.Message):
        r"""Additional statistics about a commit.

        Attributes:
            mutation_count (int):
                The total number of mutations for the transaction.
                Knowing the ``mutation_count`` value can help you
                maximize the number of mutations in a transaction and
                minimize the number of API round trips. You can also
                monitor this value to prevent transactions from
                exceeding the system limit. If the number of mutations
                exceeds the limit, the server returns
                ``INVALID_ARGUMENT``.
        """

        # Field number must stay in sync with the google.spanner.v1 proto.
        mutation_count = proto.Field(proto.INT64, number=1)

    commit_timestamp = proto.Field(
        proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp)
    commit_stats = proto.Field(
        proto.MESSAGE, number=2, message=CommitStats)
class KeyRange(proto.Message):
    r"""KeyRange represents a range of rows in a table or index.

    A range has a start key and an end key. These keys can be open or
    closed, indicating whether the range includes rows with that key.

    Keys are represented by lists, where the ith value in the list
    corresponds to the ith component of the table or index primary
    key. Individual values are encoded as described
    [here][google.spanner.v1.TypeCode].

    Key ranges with multiple components are interpreted
    lexicographically by component, using the table or index key's
    declared sort order; for a descending key column the "largest"
    value is therefore passed as the start of the range. Start and end
    keys can omit trailing key components, which affects the inclusion
    of rows that exactly match the provided components: a closed key
    includes exact matches, an open key excludes them. For example,
    with ``PRIMARY KEY(UserName, EventDate)``:

    ::

        "start_closed": ["Bob", "2015-01-01"]
        "end_closed": ["Bob", "2015-12-31"]

    returns all events for user ``"Bob"`` that occurred in 2015, while

    ::

        "start_closed": []
        "end_closed": []

    includes every row in the table.

    This message has `oneof`_ fields (mutually exclusive fields).
    For each oneof, at most one member field can be set at the same
    time. Setting any member of the oneof automatically clears all
    other members.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        start_closed (google.protobuf.struct_pb2.ListValue):
            If the start is closed, then the range includes all rows
            whose first ``len(start_closed)`` key columns exactly match
            ``start_closed``.
            This field is a member of `oneof`_ ``start_key_type``.
        start_open (google.protobuf.struct_pb2.ListValue):
            If the start is open, then the range excludes rows whose
            first ``len(start_open)`` key columns exactly match
            ``start_open``.
            This field is a member of `oneof`_ ``start_key_type``.
        end_closed (google.protobuf.struct_pb2.ListValue):
            If the end is closed, then the range includes all rows
            whose first ``len(end_closed)`` key columns exactly match
            ``end_closed``.
            This field is a member of `oneof`_ ``end_key_type``.
        end_open (google.protobuf.struct_pb2.ListValue):
            If the end is open, then the range excludes rows whose
            first ``len(end_open)`` key columns exactly match
            ``end_open``.
            This field is a member of `oneof`_ ``end_key_type``.
    """

    # Field numbers mirror google.spanner.v1.KeyRange; the two oneofs
    # ("start_key_type", "end_key_type") are enforced by proto-plus.
    start_closed = proto.Field(
        proto.MESSAGE, number=1, oneof='start_key_type',
        message=struct_pb2.ListValue)
    start_open = proto.Field(
        proto.MESSAGE, number=2, oneof='start_key_type',
        message=struct_pb2.ListValue)
    end_closed = proto.Field(
        proto.MESSAGE, number=3, oneof='end_key_type',
        message=struct_pb2.ListValue)
    end_open = proto.Field(
        proto.MESSAGE, number=4, oneof='end_key_type',
        message=struct_pb2.ListValue)
class KeySet(proto.Message):
    r"""``KeySet`` defines a collection of Cloud Spanner keys and/or
    key ranges. All the keys are expected to be in the same table or
    index. The keys need not be sorted in any particular way.

    If the same key is specified multiple times in the set (for
    example if two ranges, two keys, or a key and a range overlap),
    Cloud Spanner behaves as if the key were only specified once.

    Attributes:
        keys (Sequence[google.protobuf.struct_pb2.ListValue]):
            A list of specific keys. Entries in ``keys`` should have
            exactly as many elements as there are columns in the
            primary or index key with which this ``KeySet`` is used.
            Individual key values are encoded as described
            [here][google.spanner.v1.TypeCode].
        ranges (Sequence[google.cloud.spanner_v1.types.KeyRange]):
            A list of key ranges. See
            [KeyRange][google.spanner.v1.KeyRange] for more information
            about key range specifications.
        all_ (bool):
            For convenience ``all`` can be set to ``true`` to indicate
            that this ``KeySet`` matches all keys in the table or
            index. Note that any keys specified in ``keys`` or
            ``ranges`` are only yielded once.
    """

    # "all" is reserved wording in the proto; proto-plus exposes it as
    # the trailing-underscore attribute "all_".
    keys = proto.RepeatedField(
        proto.MESSAGE, number=1, message=struct_pb2.ListValue)
    ranges = proto.RepeatedField(
        proto.MESSAGE, number=2, message='KeyRange')
    all_ = proto.Field(proto.BOOL, number=3)
class Mutation(proto.Message):
    r"""A modification to one or more Cloud Spanner rows. Mutations
    can be applied to a Cloud Spanner database by sending them in a
    [Commit][google.spanner.v1.Spanner.Commit] call.

    This message has `oneof`_ fields (mutually exclusive fields).
    For each oneof, at most one member field can be set at the same
    time. Setting any member of the oneof automatically clears all
    other members.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        insert (google.cloud.spanner_v1.types.Mutation.Write):
            Insert new rows in a table. If any of the rows already
            exist, the write or transaction fails with error
            ``ALREADY_EXISTS``.
            This field is a member of `oneof`_ ``operation``.
        update (google.cloud.spanner_v1.types.Mutation.Write):
            Update existing rows in a table. If any of the rows does
            not already exist, the transaction fails with error
            ``NOT_FOUND``.
            This field is a member of `oneof`_ ``operation``.
        insert_or_update (google.cloud.spanner_v1.types.Mutation.Write):
            Like [insert][google.spanner.v1.Mutation.insert], except
            that if the row already exists, then its column values are
            overwritten with the ones provided. Any column values not
            explicitly written are preserved. Just as with
            [insert][google.spanner.v1.Mutation.insert], all
            ``NOT NULL`` columns in the table must be given a value,
            even when the row already exists and will therefore
            actually be updated.
            This field is a member of `oneof`_ ``operation``.
        replace (google.cloud.spanner_v1.types.Mutation.Write):
            Like [insert][google.spanner.v1.Mutation.insert], except
            that if the row already exists, it is deleted, and the
            column values provided are inserted instead. Unlike
            [insert_or_update][google.spanner.v1.Mutation.insert_or_update],
            this means any values not explicitly written become
            ``NULL``. In an interleaved table, if the child table was
            created with the ``ON DELETE CASCADE`` annotation,
            replacing a parent row also deletes the child rows;
            otherwise the child rows must be deleted first.
            This field is a member of `oneof`_ ``operation``.
        delete (google.cloud.spanner_v1.types.Mutation.Delete):
            Delete rows from a table. Succeeds whether or not the
            named rows were present.
            This field is a member of `oneof`_ ``operation``.
    """

    class Write(proto.Message):
        r"""Arguments to [insert][google.spanner.v1.Mutation.insert],
        [update][google.spanner.v1.Mutation.update],
        [insert_or_update][google.spanner.v1.Mutation.insert_or_update],
        and [replace][google.spanner.v1.Mutation.replace] operations.

        Attributes:
            table (str):
                Required. The table whose rows will be written.
            columns (Sequence[str]):
                The names of the columns in
                [table][google.spanner.v1.Mutation.Write.table] to be
                written. The list must contain enough columns to allow
                Cloud Spanner to derive values for all primary key
                columns in the row(s) to be modified.
            values (Sequence[google.protobuf.struct_pb2.ListValue]):
                The values to be written, one list per row. Each list
                must have exactly as many entries as there are entries
                in [columns][google.spanner.v1.Mutation.Write.columns].
                Sending multiple lists is equivalent to sending
                multiple ``Mutation``\ s, each containing one
                ``values`` entry and repeating
                [table][google.spanner.v1.Mutation.Write.table] and
                [columns][google.spanner.v1.Mutation.Write.columns].
                Individual values in each list are encoded as described
                [here][google.spanner.v1.TypeCode].
        """

        # Field numbers mirror google.spanner.v1.Mutation.Write.
        table = proto.Field(proto.STRING, number=1)
        columns = proto.RepeatedField(proto.STRING, number=2)
        values = proto.RepeatedField(
            proto.MESSAGE, number=3, message=struct_pb2.ListValue)

    class Delete(proto.Message):
        r"""Arguments to [delete][google.spanner.v1.Mutation.delete]
        operations.

        Attributes:
            table (str):
                Required. The table whose rows will be deleted.
            key_set (google.cloud.spanner_v1.types.KeySet):
                Required. The primary keys of the rows within
                [table][google.spanner.v1.Mutation.Delete.table] to
                delete. The primary keys must be specified in the order
                in which they appear in the ``PRIMARY KEY()`` clause of
                the table's equivalent DDL statement. Delete is
                idempotent: the transaction will succeed even if some
                or all rows do not exist.
        """

        table = proto.Field(proto.STRING, number=1)
        key_set = proto.Field(
            proto.MESSAGE, number=2, message=keys.KeySet)

    # Exactly one of the following may be set — the "operation" oneof.
    insert = proto.Field(
        proto.MESSAGE, number=1, oneof='operation', message=Write)
    update = proto.Field(
        proto.MESSAGE, number=2, oneof='operation', message=Write)
    insert_or_update = proto.Field(
        proto.MESSAGE, number=3, oneof='operation', message=Write)
    replace = proto.Field(
        proto.MESSAGE, number=4, oneof='operation', message=Write)
    delete = proto.Field(
        proto.MESSAGE, number=5, oneof='operation', message=Delete)
class PlanNode(proto.Message):
    r"""Node information for nodes appearing in a
    [QueryPlan.plan_nodes][google.spanner.v1.QueryPlan.plan_nodes].

    Attributes:
        index (int):
            The ``PlanNode``'s index in [node
            list][google.spanner.v1.QueryPlan.plan_nodes].
        kind (google.cloud.spanner_v1.types.PlanNode.Kind):
            Used to determine the type of node. May be needed for
            visualizing different kinds of nodes differently. For
            example, if the node is a
            [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] node, it
            will have a condensed representation which can be used to
            directly embed a description of the node in its parent.
        display_name (str):
            The display name for the node.
        child_links (Sequence[google.cloud.spanner_v1.types.PlanNode.ChildLink]):
            List of child node ``index``\ es and their relationship to
            this parent.
        short_representation (google.cloud.spanner_v1.types.PlanNode.ShortRepresentation):
            Condensed representation for
            [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] nodes.
        metadata (google.protobuf.struct_pb2.Struct):
            Attributes relevant to the node contained in a group of
            key-value pairs. For example, a Parameter Reference node
            could have the following information in its metadata:

            ::

                {
                  "parameter_reference": "param1",
                  "parameter_type": "array"
                }
        execution_stats (google.protobuf.struct_pb2.Struct):
            The execution statistics associated with the node,
            contained in a group of key-value pairs. Only present if
            the plan was returned as a result of a profile query. For
            example, number of executions, number of rows/time per
            execution etc.
    """

    class Kind(proto.Enum):
        r"""The kind of [PlanNode][google.spanner.v1.PlanNode].
        Distinguishes between the two different kinds of nodes that can
        appear in a query plan.
        """
        KIND_UNSPECIFIED = 0
        RELATIONAL = 1
        SCALAR = 2

    class ChildLink(proto.Message):
        r"""Metadata associated with a parent-child relationship
        appearing in a [PlanNode][google.spanner.v1.PlanNode].

        Attributes:
            child_index (int):
                The node to which the link points.
            type_ (str):
                The type of the link. For example, in Hash Joins this
                could be used to distinguish between the build child
                and the probe child, or in the case of the child being
                an output variable, to represent the tag associated
                with the output variable.
            variable (str):
                Only present if the child node is
                [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] and
                corresponds to an output variable of the parent node.
                The field carries the name of the output variable. For
                example, a ``TableScan`` operator that reads rows from
                a table will have child links to the ``SCALAR`` nodes
                representing the output variables created for each
                column that is read by the operator, with ``variable``
                set to the name assigned to each column.
        """

        child_index = proto.Field(proto.INT32, number=1)
        # "type" is shadowed by the builtin-style reserved name, hence
        # the trailing underscore exposed by proto-plus.
        type_ = proto.Field(proto.STRING, number=2)
        variable = proto.Field(proto.STRING, number=3)

    class ShortRepresentation(proto.Message):
        r"""Condensed representation of a node and its subtree. Only
        present for ``SCALAR``
        [PlanNode(s)][google.spanner.v1.PlanNode].

        Attributes:
            description (str):
                A string representation of the expression subtree
                rooted at this node.
            subqueries (Sequence[google.cloud.spanner_v1.types.PlanNode.ShortRepresentation.SubqueriesEntry]):
                A mapping of (subquery variable name) -> (subquery node
                id) for cases where the ``description`` string of this
                node references a ``SCALAR`` subquery contained in the
                expression subtree rooted at this node. The referenced
                ``SCALAR`` subquery may not necessarily be a direct
                child of this node.
        """

        description = proto.Field(proto.STRING, number=1)
        subqueries = proto.MapField(proto.STRING, proto.INT32, number=2)

    index = proto.Field(proto.INT32, number=1)
    kind = proto.Field(proto.ENUM, number=2, enum=Kind)
    display_name = proto.Field(proto.STRING, number=3)
    child_links = proto.RepeatedField(
        proto.MESSAGE, number=4, message=ChildLink)
    short_representation = proto.Field(
        proto.MESSAGE, number=5, message=ShortRepresentation)
    metadata = proto.Field(
        proto.MESSAGE, number=6, message=struct_pb2.Struct)
    execution_stats = proto.Field(
        proto.MESSAGE, number=7, message=struct_pb2.Struct)
+ """ + + plan_nodes = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PlanNode', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py new file mode 100644 index 0000000000..27371dc89a --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py @@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.spanner_v1.types import query_plan as gs_query_plan +from google.cloud.spanner_v1.types import transaction as gs_transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'ResultSet', + 'PartialResultSet', + 'ResultSetMetadata', + 'ResultSetStats', + }, +) + + +class ResultSet(proto.Message): + r"""Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Attributes: + metadata (google.cloud.spanner_v1.types.ResultSetMetadata): + Metadata about the result set, such as row + type information. 
class ResultSet(proto.Message):
    r"""Results from [Read][google.spanner.v1.Spanner.Read] or
    [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].

    Attributes:
        metadata (google.cloud.spanner_v1.types.ResultSetMetadata):
            Metadata about the result set, such as row type
            information.
        rows (Sequence[google.protobuf.struct_pb2.ListValue]):
            Each element in ``rows`` is a row whose format is defined
            by
            [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type].
            The ith element in each row matches the ith field in
            [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type].
            Elements are encoded based on type as described
            [here][google.spanner.v1.TypeCode].
        stats (google.cloud.spanner_v1.types.ResultSetStats):
            Query plan and execution statistics for the SQL statement
            that produced this result set. These can be requested by
            setting
            [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode].
            DML statements always produce stats containing the number
            of rows modified, unless executed using the
            [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN]
            [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode].
            Other fields may or may not be populated, based on the
            [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode].
    """

    # ResultSetMetadata/ResultSetStats are referenced by name because
    # they are declared later in this module.
    metadata = proto.Field(
        proto.MESSAGE, number=1, message='ResultSetMetadata')
    rows = proto.RepeatedField(
        proto.MESSAGE, number=2, message=struct_pb2.ListValue)
    stats = proto.Field(
        proto.MESSAGE, number=3, message='ResultSetStats')
class PartialResultSet(proto.Message):
    r"""Partial results from a streaming read or SQL query. Streaming
    reads and SQL queries better tolerate large result sets, large
    rows, and large values, but are a little trickier to consume.

    Attributes:
        metadata (google.cloud.spanner_v1.types.ResultSetMetadata):
            Metadata about the result set, such as row type
            information. Only present in the first response.
        values (Sequence[google.protobuf.struct_pb2.Value]):
            A streamed result set consists of a stream of values, which
            might be split into many ``PartialResultSet`` messages to
            accommodate large rows and/or large values. Every N
            complete values defines a row, where N is equal to the
            number of entries in
            [metadata.row_type.fields][google.spanner.v1.StructType.fields].

            Most values are encoded based on type as described
            [here][google.spanner.v1.TypeCode].

            It is possible that the last value in values is "chunked",
            meaning that the rest of the value is sent in subsequent
            ``PartialResultSet``\ (s). This is denoted by the
            [chunked_value][google.spanner.v1.PartialResultSet.chunked_value]
            field. Two or more chunked values can be merged to form a
            complete value as follows:

            -  ``bool/number/null``: cannot be chunked
            -  ``string``: concatenate the strings
            -  ``list``: concatenate the lists. If the last element in
               a list is a ``string``, ``list``, or ``object``, merge
               it with the first element in the next list by applying
               these rules recursively.
            -  ``object``: concatenate the (field name, field value)
               pairs. If a field name is duplicated, then apply these
               rules recursively to merge the field values.

            Some examples of merging:

            ::

                # Strings are concatenated.
                "foo", "bar" => "foobar"

                # Lists of non-strings are concatenated.
                [2, 3], [4] => [2, 3, 4]

                # Lists are concatenated, but the last and first elements are merged
                # because they are strings.
                ["a", "b"], ["c", "d"] => ["a", "bc", "d"]

                # Lists are concatenated, but the last and first elements are merged
                # because they are lists. Recursively, the last and first elements
                # of the inner lists are merged because they are strings.
                ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"]

                # Non-overlapping object fields are combined.
                {"a": "1"}, {"b": "2"} => {"a": "1", "b": "2"}

                # Overlapping object fields are merged.
                {"a": "1"}, {"a": "2"} => {"a": "12"}

                # Examples of merging objects containing lists of strings.
                {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]}

            For a more complete example, suppose a streaming SQL query
            is yielding a result set whose rows contain a single string
            field. The following ``PartialResultSet``\ s might be
            yielded:

            ::

                {
                  "metadata": { ... }
                  "values": ["Hello", "W"]
                  "chunked_value": true
                  "resume_token": "Af65..."
                }
                {
                  "values": ["orl"]
                  "chunked_value": true
                  "resume_token": "Bqp2..."
                }
                {
                  "values": ["d"]
                  "resume_token": "Zx1B..."
                }

            This sequence of ``PartialResultSet``\ s encodes two rows,
            one containing the field value ``"Hello"``, and a second
            containing the field value ``"World" = "W" + "orl" + "d"``.
        chunked_value (bool):
            If true, then the final value in
            [values][google.spanner.v1.PartialResultSet.values] is
            chunked, and must be combined with more values from
            subsequent ``PartialResultSet``\ s to obtain a complete
            field value.
        resume_token (bytes):
            Streaming calls might be interrupted for a variety of
            reasons, such as TCP connection loss. If this occurs, the
            stream of results can be resumed by re-sending the original
            request and including ``resume_token``. Note that executing
            any other transaction in the same session invalidates the
            token.
        stats (google.cloud.spanner_v1.types.ResultSetStats):
            Query plan and execution statistics for the statement that
            produced this streaming result set. These can be requested
            by setting
            [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]
            and are sent only once with the last response in the
            stream. This field will also be present in the last
            response for DML statements.
    """

    # Field numbers mirror google.spanner.v1.PartialResultSet; the
    # message types are referenced by name because they are declared
    # later in this module.
    metadata = proto.Field(
        proto.MESSAGE, number=1, message='ResultSetMetadata')
    values = proto.RepeatedField(
        proto.MESSAGE, number=2, message=struct_pb2.Value)
    chunked_value = proto.Field(proto.BOOL, number=3)
    resume_token = proto.Field(proto.BYTES, number=4)
    stats = proto.Field(
        proto.MESSAGE, number=5, message='ResultSetStats')
+ """ + + metadata = proto.Field( + proto.MESSAGE, + number=1, + message='ResultSetMetadata', + ) + values = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + chunked_value = proto.Field( + proto.BOOL, + number=3, + ) + resume_token = proto.Field( + proto.BYTES, + number=4, + ) + stats = proto.Field( + proto.MESSAGE, + number=5, + message='ResultSetStats', + ) + + +class ResultSetMetadata(proto.Message): + r"""Metadata about a [ResultSet][google.spanner.v1.ResultSet] or + [PartialResultSet][google.spanner.v1.PartialResultSet]. + + Attributes: + row_type (google.cloud.spanner_v1.types.StructType): + Indicates the field names and types for the rows in the + result set. For example, a SQL query like + ``"SELECT UserId, UserName FROM Users"`` could return a + ``row_type`` value like: + + :: + + "fields": [ + { "name": "UserId", "type": { "code": "INT64" } }, + { "name": "UserName", "type": { "code": "STRING" } }, + ] + transaction (google.cloud.spanner_v1.types.Transaction): + If the read or SQL query began a transaction + as a side-effect, the information about the new + transaction is yielded here. + """ + + row_type = proto.Field( + proto.MESSAGE, + number=1, + message=gs_type.StructType, + ) + transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.Transaction, + ) + + +class ResultSetStats(proto.Message): + r"""Additional statistics about a + [ResultSet][google.spanner.v1.ResultSet] or + [PartialResultSet][google.spanner.v1.PartialResultSet]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query_plan (google.cloud.spanner_v1.types.QueryPlan): + [QueryPlan][google.spanner.v1.QueryPlan] for the query + associated with this result. + query_stats (google.protobuf.struct_pb2.Struct): + Aggregated statistics from the execution of the query. Only + present when the query is profiled. For example, a query + could return the statistics as follows: + + :: + + { + "rows_returned": "3", + "elapsed_time": "1.22 secs", + "cpu_time": "1.19 secs" + } + row_count_exact (int): + Standard DML returns an exact count of rows + that were modified. + This field is a member of `oneof`_ ``row_count``. + row_count_lower_bound (int): + Partitioned DML does not offer exactly-once + semantics, so it returns a lower bound of the + rows modified. + This field is a member of `oneof`_ ``row_count``. + """ + + query_plan = proto.Field( + proto.MESSAGE, + number=1, + message=gs_query_plan.QueryPlan, + ) + query_stats = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + row_count_exact = proto.Field( + proto.INT64, + number=3, + oneof='row_count', + ) + row_count_lower_bound = proto.Field( + proto.INT64, + number=4, + oneof='row_count', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py new file mode 100644 index 0000000000..187aa7b83b --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py @@ -0,0 +1,1263 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.spanner_v1.types import keys +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import transaction as gs_transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'CreateSessionRequest', + 'BatchCreateSessionsRequest', + 'BatchCreateSessionsResponse', + 'Session', + 'GetSessionRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'DeleteSessionRequest', + 'RequestOptions', + 'ExecuteSqlRequest', + 'ExecuteBatchDmlRequest', + 'ExecuteBatchDmlResponse', + 'PartitionOptions', + 'PartitionQueryRequest', + 'PartitionReadRequest', + 'Partition', + 'PartitionResponse', + 'ReadRequest', + 'BeginTransactionRequest', + 'CommitRequest', + 'RollbackRequest', + }, +) + + +class CreateSessionRequest(proto.Message): + r"""The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + + Attributes: + database (str): + Required. The database in which the new + session is created. + session (google.cloud.spanner_v1.types.Session): + The session to create. 
+ """ + + database = proto.Field( + proto.STRING, + number=1, + ) + session = proto.Field( + proto.MESSAGE, + number=2, + message='Session', + ) + + +class BatchCreateSessionsRequest(proto.Message): + r"""The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + Attributes: + database (str): + Required. The database in which the new + sessions are created. + session_template (google.cloud.spanner_v1.types.Session): + Parameters to be applied to each created + session. + session_count (int): + Required. The number of sessions to be created in this batch + call. The API may return fewer than the requested number of + sessions. If a specific number of sessions are desired, the + client can make additional calls to BatchCreateSessions + (adjusting + [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] + as necessary). + """ + + database = proto.Field( + proto.STRING, + number=1, + ) + session_template = proto.Field( + proto.MESSAGE, + number=2, + message='Session', + ) + session_count = proto.Field( + proto.INT32, + number=3, + ) + + +class BatchCreateSessionsResponse(proto.Message): + r"""The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + Attributes: + session (Sequence[google.cloud.spanner_v1.types.Session]): + The freshly created sessions. + """ + + session = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Session', + ) + + +class Session(proto.Message): + r"""A session in the Cloud Spanner API. + + Attributes: + name (str): + Output only. The name of the session. This is + always system-assigned. + labels (Sequence[google.cloud.spanner_v1.types.Session.LabelsEntry]): + The labels for the session. + + - Label keys must be between 1 and 63 characters long and + must conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. 
+ - Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + - No more than 64 labels can be associated with a given + session. + + See https://goo.gl/xmQnxf for more information on and + examples of labels. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the session + is created. + approximate_last_use_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The approximate timestamp when + the session is last used. It is typically + earlier than the actual last use time. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + create_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + approximate_last_use_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class GetSessionRequest(proto.Message): + r"""The request for [GetSession][google.spanner.v1.Spanner.GetSession]. + + Attributes: + name (str): + Required. The name of the session to + retrieve. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSessionsRequest(proto.Message): + r"""The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Attributes: + database (str): + Required. The database in which to list + sessions. + page_size (int): + Number of sessions to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.v1.ListSessionsResponse.next_page_token] + from a previous + [ListSessionsResponse][google.spanner.v1.ListSessionsResponse]. + filter (str): + An expression for filtering the results of the request. + Filter rules are case insensitive. 
The fields eligible for + filtering are: + + - ``labels.key`` where key is the name of a label + + Some examples of using filters are: + + - ``labels.env:*`` --> The session has the label "env". + - ``labels.env:dev`` --> The session has the label "env" + and the value of the label contains the string "dev". + """ + + database = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSessionsResponse(proto.Message): + r"""The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Attributes: + sessions (Sequence[google.cloud.spanner_v1.types.Session]): + The list of requested sessions. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListSessions][google.spanner.v1.Spanner.ListSessions] call + to fetch more of the matching sessions. + """ + + @property + def raw_page(self): + return self + + sessions = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Session', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSessionRequest(proto.Message): + r"""The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + + Attributes: + name (str): + Required. The name of the session to delete. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class RequestOptions(proto.Message): + r"""Common request options for various APIs. + + Attributes: + priority (google.cloud.spanner_v1.types.RequestOptions.Priority): + Priority for the request. + request_tag (str): + A per-request tag which can be applied to queries or reads, + used for statistics collection. Both request_tag and + transaction_tag can be specified for a read or query that + belongs to a transaction. This field is ignored for requests + where it's not applicable (e.g. CommitRequest). 
Legal + characters for ``request_tag`` values are all printable + characters (ASCII 32 - 126) and the length of a request_tag + is limited to 50 characters. Values that exceed this limit + are truncated. + transaction_tag (str): + A tag used for statistics collection about this transaction. + Both request_tag and transaction_tag can be specified for a + read or query that belongs to a transaction. The value of + transaction_tag should be the same for all requests + belonging to the same transaction. If this request doesn’t + belong to any transaction, transaction_tag will be ignored. + Legal characters for ``transaction_tag`` values are all + printable characters (ASCII 32 - 126) and the length of a + transaction_tag is limited to 50 characters. Values that + exceed this limit are truncated. + """ + class Priority(proto.Enum): + r"""The relative priority for requests. Note that priority is not + applicable for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + + The priority acts as a hint to the Cloud Spanner scheduler and does + not guarantee priority or order of execution. For example: + + - Some parts of a write operation always execute at + ``PRIORITY_HIGH``, regardless of the specified priority. This may + cause you to see an increase in high priority workload even when + executing a low priority request. This can also potentially cause + a priority inversion where a lower priority request will be + fulfilled ahead of a higher priority request. + - If a transaction contains multiple operations with different + priorities, Cloud Spanner does not guarantee to process the + higher priority operations first. There may be other constraints + to satisfy, such as order of operations. 
+ """ + PRIORITY_UNSPECIFIED = 0 + PRIORITY_LOW = 1 + PRIORITY_MEDIUM = 2 + PRIORITY_HIGH = 3 + + priority = proto.Field( + proto.ENUM, + number=1, + enum=Priority, + ) + request_tag = proto.Field( + proto.STRING, + number=2, + ) + transaction_tag = proto.Field( + proto.STRING, + number=3, + ) + + +class ExecuteSqlRequest(proto.Message): + r"""The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] + and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + + Attributes: + session (str): + Required. The session in which the SQL query + should be performed. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + The transaction to use. + For queries, if none is provided, the default is + a temporary read-only transaction with strong + concurrency. + + Standard DML statements require a read-write + transaction. To protect against replays, single- + use transactions are not supported. The caller + must either supply an existing transaction ID or + begin a new transaction. + Partitioned DML requires an existing Partitioned + DML transaction ID. + sql (str): + Required. The SQL string. + params (google.protobuf.struct_pb2.Struct): + Parameter names and values that bind to placeholders in the + SQL string. + + A parameter placeholder consists of the ``@`` character + followed by the parameter name (for example, + ``@firstName``). Parameter names must conform to the naming + requirements of identifiers as specified at + https://cloud.google.com/spanner/docs/lexical#identifiers. + + Parameters can appear anywhere that a literal value is + expected. The same parameter name can be used more than + once, for example: + + ``"WHERE id > @msg_id AND id < @msg_id + 100"`` + + It is an error to execute a SQL statement with unbound + parameters. + param_types (Sequence[google.cloud.spanner_v1.types.ExecuteSqlRequest.ParamTypesEntry]): + It is not always possible for Cloud Spanner to infer the + right SQL type from a JSON value. 
For example, values of + type ``BYTES`` and values of type ``STRING`` both appear in + [params][google.spanner.v1.ExecuteSqlRequest.params] as JSON + strings. + + In these cases, ``param_types`` can be used to specify the + exact SQL type for some or all of the SQL statement + parameters. See the definition of + [Type][google.spanner.v1.Type] for more information about + SQL types. + resume_token (bytes): + If this request is resuming a previously interrupted SQL + statement execution, ``resume_token`` should be copied from + the last + [PartialResultSet][google.spanner.v1.PartialResultSet] + yielded before the interruption. Doing this enables the new + SQL statement execution to resume where the last one left + off. The rest of the request parameters must exactly match + the request that yielded this token. + query_mode (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode): + Used to control the amount of debugging information returned + in [ResultSetStats][google.spanner.v1.ResultSetStats]. If + [partition_token][google.spanner.v1.ExecuteSqlRequest.partition_token] + is set, + [query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] + can only be set to + [QueryMode.NORMAL][google.spanner.v1.ExecuteSqlRequest.QueryMode.NORMAL]. + partition_token (bytes): + If present, results will be restricted to the specified + partition previously created using PartitionQuery(). There + must be an exact match for the values of fields common to + this message and the PartitionQueryRequest message used to + create this partition_token. + seqno (int): + A per-transaction sequence number used to + identify this request. This field makes each + request idempotent such that if the request is + received multiple times, at most one will + succeed. + + The sequence number must be monotonically + increasing within the transaction. If a request + arrives for the first time with an out-of-order + sequence number, the transaction may be aborted. 
+ Replays of previously handled requests will + yield the same response as the first execution. + Required for DML statements. Ignored for + queries. + query_options (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions): + Query optimizer configuration to use for the + given query. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + """ + class QueryMode(proto.Enum): + r"""Mode in which the statement must be processed.""" + NORMAL = 0 + PLAN = 1 + PROFILE = 2 + + class QueryOptions(proto.Message): + r"""Query optimizer configuration. + + Attributes: + optimizer_version (str): + An option to control the selection of optimizer version. + + This parameter allows individual queries to pick different + query optimizer versions. + + Specifying ``latest`` as a value instructs Cloud Spanner to + use the latest supported query optimizer version. If not + specified, Cloud Spanner uses the optimizer version set at + the database level options. Any other positive integer (from + the list of supported optimizer versions) overrides the + default optimizer version for query execution. + + The list of supported optimizer versions can be queried from + SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS. + + Executing a SQL statement with an invalid optimizer version + fails with an ``INVALID_ARGUMENT`` error. + + See + https://cloud.google.com/spanner/docs/query-optimizer/manage-query-optimizer + for more information on managing the query optimizer. + + The ``optimizer_version`` statement hint has precedence over + this setting. + optimizer_statistics_package (str): + An option to control the selection of optimizer statistics + package. + + This parameter allows individual queries to use a different + query optimizer statistics package. + + Specifying ``latest`` as a value instructs Cloud Spanner to + use the latest generated statistics package. 
If not + specified, Cloud Spanner uses the statistics package set at + the database level options, or the latest package if the + database option is not set. + + The statistics package requested by the query has to be + exempt from garbage collection. This can be achieved with + the following DDL statement: + + :: + + ALTER STATISTICS SET OPTIONS (allow_gc=false) + + The list of available statistics packages can be queried + from ``INFORMATION_SCHEMA.SPANNER_STATISTICS``. + + Executing a SQL statement with an invalid optimizer + statistics package or with a statistics package that allows + garbage collection fails with an ``INVALID_ARGUMENT`` error. + """ + + optimizer_version = proto.Field( + proto.STRING, + number=1, + ) + optimizer_statistics_package = proto.Field( + proto.STRING, + number=2, + ) + + session = proto.Field( + proto.STRING, + number=1, + ) + transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + sql = proto.Field( + proto.STRING, + number=3, + ) + params = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Struct, + ) + param_types = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=5, + message=gs_type.Type, + ) + resume_token = proto.Field( + proto.BYTES, + number=6, + ) + query_mode = proto.Field( + proto.ENUM, + number=7, + enum=QueryMode, + ) + partition_token = proto.Field( + proto.BYTES, + number=8, + ) + seqno = proto.Field( + proto.INT64, + number=9, + ) + query_options = proto.Field( + proto.MESSAGE, + number=10, + message=QueryOptions, + ) + request_options = proto.Field( + proto.MESSAGE, + number=11, + message='RequestOptions', + ) + + +class ExecuteBatchDmlRequest(proto.Message): + r"""The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + + Attributes: + session (str): + Required. The session in which the DML + statements should be performed. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + Required. 
The transaction to use. Must be a + read-write transaction. + To protect against replays, single-use + transactions are not supported. The caller must + either supply an existing transaction ID or + begin a new transaction. + statements (Sequence[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest.Statement]): + Required. The list of statements to execute in this batch. + Statements are executed serially, such that the effects of + statement ``i`` are visible to statement ``i+1``. Each + statement must be a DML statement. Execution stops at the + first failed statement; the remaining statements are not + executed. + + Callers must provide at least one statement. + seqno (int): + Required. A per-transaction sequence number + used to identify this request. This field makes + each request idempotent such that if the request + is received multiple times, at most one will + succeed. + + The sequence number must be monotonically + increasing within the transaction. If a request + arrives for the first time with an out-of-order + sequence number, the transaction may be aborted. + Replays of previously handled requests will + yield the same response as the first execution. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + """ + + class Statement(proto.Message): + r"""A single DML statement. + + Attributes: + sql (str): + Required. The DML string. + params (google.protobuf.struct_pb2.Struct): + Parameter names and values that bind to placeholders in the + DML string. + + A parameter placeholder consists of the ``@`` character + followed by the parameter name (for example, + ``@firstName``). Parameter names can contain letters, + numbers, and underscores. + + Parameters can appear anywhere that a literal value is + expected. The same parameter name can be used more than + once, for example: + + ``"WHERE id > @msg_id AND id < @msg_id + 100"`` + + It is an error to execute a SQL statement with unbound + parameters. 
+ param_types (Sequence[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest.Statement.ParamTypesEntry]): + It is not always possible for Cloud Spanner to infer the + right SQL type from a JSON value. For example, values of + type ``BYTES`` and values of type ``STRING`` both appear in + [params][google.spanner.v1.ExecuteBatchDmlRequest.Statement.params] + as JSON strings. + + In these cases, ``param_types`` can be used to specify the + exact SQL type for some or all of the SQL statement + parameters. See the definition of + [Type][google.spanner.v1.Type] for more information about + SQL types. + """ + + sql = proto.Field( + proto.STRING, + number=1, + ) + params = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + param_types = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message=gs_type.Type, + ) + + session = proto.Field( + proto.STRING, + number=1, + ) + transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + statements = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Statement, + ) + seqno = proto.Field( + proto.INT64, + number=4, + ) + request_options = proto.Field( + proto.MESSAGE, + number=5, + message='RequestOptions', + ) + + +class ExecuteBatchDmlResponse(proto.Message): + r"""The response for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + Contains a list of [ResultSet][google.spanner.v1.ResultSet] + messages, one for each DML statement that has successfully executed, + in the same order as the statements in the request. If a statement + fails, the status in the response body identifies the cause of the + failure. + + To check for DML statements that failed, use the following approach: + + 1. Check the status in the response message. The + [google.rpc.Code][google.rpc.Code] enum value ``OK`` indicates + that all statements were executed successfully. + 2. 
If the status was not ``OK``, check the number of result sets in + the response. If the response contains ``N`` + [ResultSet][google.spanner.v1.ResultSet] messages, then statement + ``N+1`` in the request failed. + + Example 1: + + - Request: 5 DML statements, all executed successfully. + - Response: 5 [ResultSet][google.spanner.v1.ResultSet] messages, + with the status ``OK``. + + Example 2: + + - Request: 5 DML statements. The third statement has a syntax + error. + - Response: 2 [ResultSet][google.spanner.v1.ResultSet] messages, + and a syntax error (``INVALID_ARGUMENT``) status. The number of + [ResultSet][google.spanner.v1.ResultSet] messages indicates that + the third statement failed, and the fourth and fifth statements + were not executed. + + Attributes: + result_sets (Sequence[google.cloud.spanner_v1.types.ResultSet]): + One [ResultSet][google.spanner.v1.ResultSet] for each + statement in the request that ran successfully, in the same + order as the statements in the request. Each + [ResultSet][google.spanner.v1.ResultSet] does not contain + any rows. The + [ResultSetStats][google.spanner.v1.ResultSetStats] in each + [ResultSet][google.spanner.v1.ResultSet] contain the number + of rows modified by the statement. + + Only the first [ResultSet][google.spanner.v1.ResultSet] in + the response contains valid + [ResultSetMetadata][google.spanner.v1.ResultSetMetadata]. + status (google.rpc.status_pb2.Status): + If all DML statements are executed successfully, the status + is ``OK``. Otherwise, the error status of the first failed + statement. + """ + + result_sets = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=result_set.ResultSet, + ) + status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class PartitionOptions(proto.Message): + r"""Options for a PartitionQueryRequest and + PartitionReadRequest. 
+ + Attributes: + partition_size_bytes (int): + **Note:** This hint is currently ignored by PartitionQuery + and PartitionRead requests. + + The desired data size for each partition generated. The + default for this option is currently 1 GiB. This is only a + hint. The actual size of each partition may be smaller or + larger than this size request. + max_partitions (int): + **Note:** This hint is currently ignored by PartitionQuery + and PartitionRead requests. + + The desired maximum number of partitions to return. For + example, this may be set to the number of workers available. + The default for this option is currently 10,000. The maximum + value is currently 200,000. This is only a hint. The actual + number of partitions returned may be smaller or larger than + this maximum count request. + """ + + partition_size_bytes = proto.Field( + proto.INT64, + number=1, + ) + max_partitions = proto.Field( + proto.INT64, + number=2, + ) + + +class PartitionQueryRequest(proto.Message): + r"""The request for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + + Attributes: + session (str): + Required. The session used to create the + partitions. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + Read only snapshot transactions are + supported, read/write and single use + transactions are not. + sql (str): + Required. The query request to generate partitions for. The + request will fail if the query is not root partitionable. + The query plan of a root partitionable query has a single + distributed union operator. A distributed union operator + conceptually divides one or more tables into multiple + splits, remotely evaluates a subquery independently on each + split, and then unions all results. + + This must not contain DML commands, such as INSERT, UPDATE, + or DELETE. Use + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + with a PartitionedDml transaction for large, + partition-friendly DML operations. 
+ params (google.protobuf.struct_pb2.Struct): + Parameter names and values that bind to placeholders in the + SQL string. + + A parameter placeholder consists of the ``@`` character + followed by the parameter name (for example, + ``@firstName``). Parameter names can contain letters, + numbers, and underscores. + + Parameters can appear anywhere that a literal value is + expected. The same parameter name can be used more than + once, for example: + + ``"WHERE id > @msg_id AND id < @msg_id + 100"`` + + It is an error to execute a SQL statement with unbound + parameters. + param_types (Sequence[google.cloud.spanner_v1.types.PartitionQueryRequest.ParamTypesEntry]): + It is not always possible for Cloud Spanner to infer the + right SQL type from a JSON value. For example, values of + type ``BYTES`` and values of type ``STRING`` both appear in + [params][google.spanner.v1.PartitionQueryRequest.params] as + JSON strings. + + In these cases, ``param_types`` can be used to specify the + exact SQL type for some or all of the SQL query parameters. + See the definition of [Type][google.spanner.v1.Type] for + more information about SQL types. + partition_options (google.cloud.spanner_v1.types.PartitionOptions): + Additional options that affect how many + partitions are created. + """ + + session = proto.Field( + proto.STRING, + number=1, + ) + transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + sql = proto.Field( + proto.STRING, + number=3, + ) + params = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Struct, + ) + param_types = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=5, + message=gs_type.Type, + ) + partition_options = proto.Field( + proto.MESSAGE, + number=6, + message='PartitionOptions', + ) + + +class PartitionReadRequest(proto.Message): + r"""The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + Attributes: + session (str): + Required. 
The session used to create the + partitions. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + Read only snapshot transactions are + supported, read/write and single use + transactions are not. + table (str): + Required. The name of the table in the + database to be read. + index (str): + If non-empty, the name of an index on + [table][google.spanner.v1.PartitionReadRequest.table]. This + index is used instead of the table primary key when + interpreting + [key_set][google.spanner.v1.PartitionReadRequest.key_set] + and sorting result rows. See + [key_set][google.spanner.v1.PartitionReadRequest.key_set] + for further information. + columns (Sequence[str]): + The columns of + [table][google.spanner.v1.PartitionReadRequest.table] to be + returned for each row matching this request. + key_set (google.cloud.spanner_v1.types.KeySet): + Required. ``key_set`` identifies the rows to be yielded. + ``key_set`` names the primary keys of the rows in + [table][google.spanner.v1.PartitionReadRequest.table] to be + yielded, unless + [index][google.spanner.v1.PartitionReadRequest.index] is + present. If + [index][google.spanner.v1.PartitionReadRequest.index] is + present, then + [key_set][google.spanner.v1.PartitionReadRequest.key_set] + instead names index keys in + [index][google.spanner.v1.PartitionReadRequest.index]. + + It is not an error for the ``key_set`` to name rows that do + not exist in the database. Read yields nothing for + nonexistent rows. + partition_options (google.cloud.spanner_v1.types.PartitionOptions): + Additional options that affect how many + partitions are created. 
+ """ + + session = proto.Field( + proto.STRING, + number=1, + ) + transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + table = proto.Field( + proto.STRING, + number=3, + ) + index = proto.Field( + proto.STRING, + number=4, + ) + columns = proto.RepeatedField( + proto.STRING, + number=5, + ) + key_set = proto.Field( + proto.MESSAGE, + number=6, + message=keys.KeySet, + ) + partition_options = proto.Field( + proto.MESSAGE, + number=9, + message='PartitionOptions', + ) + + +class Partition(proto.Message): + r"""Information returned for each partition returned in a + PartitionResponse. + + Attributes: + partition_token (bytes): + This token can be passed to Read, + StreamingRead, ExecuteSql, or + ExecuteStreamingSql requests to restrict the + results to those identified by this partition + token. + """ + + partition_token = proto.Field( + proto.BYTES, + number=1, + ) + + +class PartitionResponse(proto.Message): + r"""The response for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + Attributes: + partitions (Sequence[google.cloud.spanner_v1.types.Partition]): + Partitions created by this request. + transaction (google.cloud.spanner_v1.types.Transaction): + Transaction created by this request. + """ + + partitions = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Partition', + ) + transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.Transaction, + ) + + +class ReadRequest(proto.Message): + r"""The request for [Read][google.spanner.v1.Spanner.Read] and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + + Attributes: + session (str): + Required. The session in which the read + should be performed. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + The transaction to use. If none is provided, + the default is a temporary read-only transaction + with strong concurrency. 
+ table (str): + Required. The name of the table in the + database to be read. + index (str): + If non-empty, the name of an index on + [table][google.spanner.v1.ReadRequest.table]. This index is + used instead of the table primary key when interpreting + [key_set][google.spanner.v1.ReadRequest.key_set] and sorting + result rows. See + [key_set][google.spanner.v1.ReadRequest.key_set] for further + information. + columns (Sequence[str]): + Required. The columns of + [table][google.spanner.v1.ReadRequest.table] to be returned + for each row matching this request. + key_set (google.cloud.spanner_v1.types.KeySet): + Required. ``key_set`` identifies the rows to be yielded. + ``key_set`` names the primary keys of the rows in + [table][google.spanner.v1.ReadRequest.table] to be yielded, + unless [index][google.spanner.v1.ReadRequest.index] is + present. If [index][google.spanner.v1.ReadRequest.index] is + present, then + [key_set][google.spanner.v1.ReadRequest.key_set] instead + names index keys in + [index][google.spanner.v1.ReadRequest.index]. + + If the + [partition_token][google.spanner.v1.ReadRequest.partition_token] + field is empty, rows are yielded in table primary key order + (if [index][google.spanner.v1.ReadRequest.index] is empty) + or index key order (if + [index][google.spanner.v1.ReadRequest.index] is non-empty). + If the + [partition_token][google.spanner.v1.ReadRequest.partition_token] + field is not empty, rows will be yielded in an unspecified + order. + + It is not an error for the ``key_set`` to name rows that do + not exist in the database. Read yields nothing for + nonexistent rows. + limit (int): + If greater than zero, only the first ``limit`` rows are + yielded. If ``limit`` is zero, the default is no limit. A + limit cannot be specified if ``partition_token`` is set. 
+ resume_token (bytes): + If this request is resuming a previously interrupted read, + ``resume_token`` should be copied from the last + [PartialResultSet][google.spanner.v1.PartialResultSet] + yielded before the interruption. Doing this enables the new + read to resume where the last read left off. The rest of the + request parameters must exactly match the request that + yielded this token. + partition_token (bytes): + If present, results will be restricted to the specified + partition previously created using PartitionRead(). There + must be an exact match for the values of fields common to + this message and the PartitionReadRequest message used to + create this partition_token. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + """ + + session = proto.Field( + proto.STRING, + number=1, + ) + transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + table = proto.Field( + proto.STRING, + number=3, + ) + index = proto.Field( + proto.STRING, + number=4, + ) + columns = proto.RepeatedField( + proto.STRING, + number=5, + ) + key_set = proto.Field( + proto.MESSAGE, + number=6, + message=keys.KeySet, + ) + limit = proto.Field( + proto.INT64, + number=8, + ) + resume_token = proto.Field( + proto.BYTES, + number=9, + ) + partition_token = proto.Field( + proto.BYTES, + number=10, + ) + request_options = proto.Field( + proto.MESSAGE, + number=11, + message='RequestOptions', + ) + + +class BeginTransactionRequest(proto.Message): + r"""The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + + Attributes: + session (str): + Required. The session in which the + transaction runs. + options (google.cloud.spanner_v1.types.TransactionOptions): + Required. Options for the new transaction. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. Priority is ignored for + this request. 
Setting the priority in this request_options + struct will not do anything. To set the priority for a + transaction, set it on the reads and writes that are part of + this transaction instead. + """ + + session = proto.Field( + proto.STRING, + number=1, + ) + options = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionOptions, + ) + request_options = proto.Field( + proto.MESSAGE, + number=3, + message='RequestOptions', + ) + + +class CommitRequest(proto.Message): + r"""The request for [Commit][google.spanner.v1.Spanner.Commit]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + session (str): + Required. The session in which the + transaction to be committed is running. + transaction_id (bytes): + Commit a previously-started transaction. + This field is a member of `oneof`_ ``transaction``. + single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): + Execute mutations in a temporary transaction. Note that + unlike commit of a previously-started transaction, commit + with a temporary transaction is non-idempotent. That is, if + the ``CommitRequest`` is sent to Cloud Spanner more than + once (for instance, due to retries in the application, or in + the transport library), it is possible that the mutations + are executed more than once. If this is undesirable, use + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] + and [Commit][google.spanner.v1.Spanner.Commit] instead. + This field is a member of `oneof`_ ``transaction``. + mutations (Sequence[google.cloud.spanner_v1.types.Mutation]): + The mutations to be executed when this + transaction commits. 
All mutations are applied + atomically, in the order they appear in this + list. + return_commit_stats (bool): + If ``true``, then statistics related to the transaction will + be included in the + [CommitResponse][google.spanner.v1.CommitResponse.commit_stats]. + Default value is ``false``. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + """ + + session = proto.Field( + proto.STRING, + number=1, + ) + transaction_id = proto.Field( + proto.BYTES, + number=2, + oneof='transaction', + ) + single_use_transaction = proto.Field( + proto.MESSAGE, + number=3, + oneof='transaction', + message=gs_transaction.TransactionOptions, + ) + mutations = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=mutation.Mutation, + ) + return_commit_stats = proto.Field( + proto.BOOL, + number=5, + ) + request_options = proto.Field( + proto.MESSAGE, + number=6, + message='RequestOptions', + ) + + +class RollbackRequest(proto.Message): + r"""The request for [Rollback][google.spanner.v1.Spanner.Rollback]. + + Attributes: + session (str): + Required. The session in which the + transaction to roll back is running. + transaction_id (bytes): + Required. The transaction to roll back. + """ + + session = proto.Field( + proto.STRING, + number=1, + ) + transaction_id = proto.Field( + proto.BYTES, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py new file mode 100644 index 0000000000..31d6f26bdb --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py @@ -0,0 +1,596 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'TransactionOptions', + 'Transaction', + 'TransactionSelector', + }, +) + + +class TransactionOptions(proto.Message): + r"""Transactions + ============ + + Each session can have at most one active transaction at a time (note + that standalone reads and queries use a transaction internally and + do count towards the one transaction limit). After the active + transaction is completed, the session can immediately be re-used for + the next transaction. It is not necessary to create a new session + for each transaction. + + Transaction Modes + ================= + + Cloud Spanner supports three transaction modes: + + 1. Locking read-write. This type of transaction is the only way to + write data into Cloud Spanner. These transactions rely on + pessimistic locking and, if necessary, two-phase commit. Locking + read-write transactions may abort, requiring the application to + retry. + + 2. Snapshot read-only. This transaction type provides guaranteed + consistency across several reads, but does not allow writes. + Snapshot read-only transactions can be configured to read at + timestamps in the past. Snapshot read-only transactions do not + need to be committed. + + 3. Partitioned DML. This type of transaction is used to execute a + single Partitioned DML statement. 
Partitioned DML partitions the + key space and runs the DML statement over each partition in + parallel using separate, internal transactions that commit + independently. Partitioned DML transactions do not need to be + committed. + + For transactions that only read, snapshot read-only transactions + provide simpler semantics and are almost always faster. In + particular, read-only transactions do not take locks, so they do not + conflict with read-write transactions. As a consequence of not + taking locks, they also do not abort, so retry loops are not needed. + + Transactions may only read/write data in a single database. They + may, however, read/write data in different tables within that + database. + + Locking Read-Write Transactions + ------------------------------- + + Locking transactions may be used to atomically read-modify-write + data anywhere in a database. This type of transaction is externally + consistent. + + Clients should attempt to minimize the amount of time a transaction + is active. Faster transactions commit with higher probability and + cause less contention. Cloud Spanner attempts to keep read locks + active as long as the transaction continues to do reads, and the + transaction has not been terminated by + [Commit][google.spanner.v1.Spanner.Commit] or + [Rollback][google.spanner.v1.Spanner.Rollback]. Long periods of + inactivity at the client may cause Cloud Spanner to release a + transaction's locks and abort it. + + Conceptually, a read-write transaction consists of zero or more + reads or SQL statements followed by + [Commit][google.spanner.v1.Spanner.Commit]. At any time before + [Commit][google.spanner.v1.Spanner.Commit], the client can send a + [Rollback][google.spanner.v1.Spanner.Rollback] request to abort the + transaction. + + Semantics + --------- + + Cloud Spanner can commit the transaction if all read locks it + acquired are still valid at commit time, and it is able to acquire + write locks for all writes. 
Cloud Spanner can abort the transaction + for any reason. If a commit attempt returns ``ABORTED``, Cloud + Spanner guarantees that the transaction has not modified any user + data in Cloud Spanner. + + Unless the transaction commits, Cloud Spanner makes no guarantees + about how long the transaction's locks were held for. It is an error + to use Cloud Spanner locks for any sort of mutual exclusion other + than between Cloud Spanner transactions themselves. + + Retrying Aborted Transactions + ----------------------------- + + When a transaction aborts, the application can choose to retry the + whole transaction again. To maximize the chances of successfully + committing the retry, the client should execute the retry in the + same session as the original attempt. The original session's lock + priority increases with each consecutive abort, meaning that each + attempt has a slightly better chance of success than the previous. + + Under some circumstances (e.g., many transactions attempting to + modify the same row(s)), a transaction can abort many times in a + short period before successfully committing. Thus, it is not a good + idea to cap the number of retries a transaction can attempt; + instead, it is better to limit the total amount of wall time spent + retrying. + + Idle Transactions + ----------------- + + A transaction is considered idle if it has no outstanding reads or + SQL queries and has not started a read or SQL query within the last + 10 seconds. Idle transactions can be aborted by Cloud Spanner so + that they don't hold on to locks indefinitely. In that case, the + commit will fail with error ``ABORTED``. + + If this behavior is undesirable, periodically executing a simple SQL + query in the transaction (e.g., ``SELECT 1``) prevents the + transaction from becoming idle. 
+ + Snapshot Read-Only Transactions + ------------------------------- + + Snapshot read-only transactions provides a simpler method than + locking read-write transactions for doing several consistent reads. + However, this type of transaction does not support writes. + + Snapshot transactions do not take locks. Instead, they work by + choosing a Cloud Spanner timestamp, then executing all reads at that + timestamp. Since they do not acquire locks, they do not block + concurrent read-write transactions. + + Unlike locking read-write transactions, snapshot read-only + transactions never abort. They can fail if the chosen read timestamp + is garbage collected; however, the default garbage collection policy + is generous enough that most applications do not need to worry about + this in practice. + + Snapshot read-only transactions do not need to call + [Commit][google.spanner.v1.Spanner.Commit] or + [Rollback][google.spanner.v1.Spanner.Rollback] (and in fact are not + permitted to do so). + + To execute a snapshot transaction, the client specifies a timestamp + bound, which tells Cloud Spanner how to choose a read timestamp. + + The types of timestamp bound are: + + - Strong (the default). + - Bounded staleness. + - Exact staleness. + + If the Cloud Spanner database to be read is geographically + distributed, stale read-only transactions can execute more quickly + than strong or read-write transaction, because they are able to + execute far from the leader replica. + + Each type of timestamp bound is discussed in detail below. + + Strong + ------ + + Strong reads are guaranteed to see the effects of all transactions + that have committed before the start of the read. Furthermore, all + rows yielded by a single read are consistent with each other -- if + any part of the read observes a transaction, all parts of the read + see the transaction. 
+ + Strong reads are not repeatable: two consecutive strong read-only + transactions might return inconsistent results if there are + concurrent writes. If consistency across reads is required, the + reads should be executed within a transaction or at an exact read + timestamp. + + See + [TransactionOptions.ReadOnly.strong][google.spanner.v1.TransactionOptions.ReadOnly.strong]. + + Exact Staleness + --------------- + + These timestamp bounds execute reads at a user-specified timestamp. + Reads at a timestamp are guaranteed to see a consistent prefix of + the global transaction history: they observe modifications done by + all transactions with a commit timestamp <= the read timestamp, and + observe none of the modifications done by transactions with a larger + commit timestamp. They will block until all conflicting transactions + that may be assigned commit timestamps <= the read timestamp have + finished. + + The timestamp can either be expressed as an absolute Cloud Spanner + commit timestamp or a staleness relative to the current time. + + These modes do not require a "negotiation phase" to pick a + timestamp. As a result, they execute slightly faster than the + equivalent boundedly stale concurrency modes. On the other hand, + boundedly stale reads usually return fresher results. + + See + [TransactionOptions.ReadOnly.read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.read_timestamp] + and + [TransactionOptions.ReadOnly.exact_staleness][google.spanner.v1.TransactionOptions.ReadOnly.exact_staleness]. + + Bounded Staleness + ----------------- + + Bounded staleness modes allow Cloud Spanner to pick the read + timestamp, subject to a user-provided staleness bound. Cloud Spanner + chooses the newest timestamp within the staleness bound that allows + execution of the reads at the closest available replica without + blocking. 
+ + All rows yielded are consistent with each other -- if any part of + the read observes a transaction, all parts of the read see the + transaction. Boundedly stale reads are not repeatable: two stale + reads, even if they use the same staleness bound, can execute at + different timestamps and thus return inconsistent results. + + Boundedly stale reads execute in two phases: the first phase + negotiates a timestamp among all replicas needed to serve the read. + In the second phase, reads are executed at the negotiated timestamp. + + As a result of the two phase execution, bounded staleness reads are + usually a little slower than comparable exact staleness reads. + However, they are typically able to return fresher results, and are + more likely to execute at the closest replica. + + Because the timestamp negotiation requires up-front knowledge of + which rows will be read, it can only be used with single-use + read-only transactions. + + See + [TransactionOptions.ReadOnly.max_staleness][google.spanner.v1.TransactionOptions.ReadOnly.max_staleness] + and + [TransactionOptions.ReadOnly.min_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.min_read_timestamp]. + + Old Read Timestamps and Garbage Collection + ------------------------------------------ + + Cloud Spanner continuously garbage collects deleted and overwritten + data in the background to reclaim storage space. This process is + known as "version GC". By default, version GC reclaims versions + after they are one hour old. Because of this, Cloud Spanner cannot + perform reads at read timestamps more than one hour in the past. + This restriction also applies to in-progress reads and/or SQL + queries whose timestamp become too old while executing. Reads and + SQL queries with too-old read timestamps fail with the error + ``FAILED_PRECONDITION``. 
+ + Partitioned DML Transactions + ---------------------------- + + Partitioned DML transactions are used to execute DML statements with + a different execution strategy that provides different, and often + better, scalability properties for large, table-wide operations than + DML in a ReadWrite transaction. Smaller scoped statements, such as + an OLTP workload, should prefer using ReadWrite transactions. + + Partitioned DML partitions the keyspace and runs the DML statement + on each partition in separate, internal transactions. These + transactions commit automatically when complete, and run + independently from one another. + + To reduce lock contention, this execution strategy only acquires + read locks on rows that match the WHERE clause of the statement. + Additionally, the smaller per-partition transactions hold locks for + less time. + + That said, Partitioned DML is not a drop-in replacement for standard + DML used in ReadWrite transactions. + + - The DML statement must be fully-partitionable. Specifically, the + statement must be expressible as the union of many statements + which each access only a single row of the table. + + - The statement is not applied atomically to all rows of the table. + Rather, the statement is applied atomically to partitions of the + table, in independent transactions. Secondary index rows are + updated atomically with the base table rows. + + - Partitioned DML does not guarantee exactly-once execution + semantics against a partition. The statement will be applied at + least once to each partition. It is strongly recommended that the + DML statement should be idempotent to avoid unexpected results. + For instance, it is potentially dangerous to run a statement such + as ``UPDATE table SET column = column + 1`` as it could be run + multiple times against some rows. + + - The partitions are committed automatically - there is no support + for Commit or Rollback. 
If the call returns an error, or if the + client issuing the ExecuteSql call dies, it is possible that some + rows had the statement executed on them successfully. It is also + possible that statement was never executed against other rows. + + - Partitioned DML transactions may only contain the execution of a + single DML statement via ExecuteSql or ExecuteStreamingSql. + + - If any error is encountered during the execution of the + partitioned DML operation (for instance, a UNIQUE INDEX + violation, division by zero, or a value that cannot be stored due + to schema constraints), then the operation is stopped at that + point and an error is returned. It is possible that at this + point, some partitions have been committed (or even committed + multiple times), and other partitions have not been run at all. + + Given the above, Partitioned DML is good fit for large, + database-wide, operations that are idempotent, such as deleting old + rows from a very large table. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + read_write (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite): + Transaction may write. + + Authorization to begin a read-write transaction requires + ``spanner.databases.beginOrRollbackReadWriteTransaction`` + permission on the ``session`` resource. + This field is a member of `oneof`_ ``mode``. + partitioned_dml (google.cloud.spanner_v1.types.TransactionOptions.PartitionedDml): + Partitioned DML transaction. + + Authorization to begin a Partitioned DML transaction + requires + ``spanner.databases.beginPartitionedDmlTransaction`` + permission on the ``session`` resource. + This field is a member of `oneof`_ ``mode``. 
+ read_only (google.cloud.spanner_v1.types.TransactionOptions.ReadOnly): + Transaction will not write. + + Authorization to begin a read-only transaction requires + ``spanner.databases.beginReadOnlyTransaction`` permission on + the ``session`` resource. + This field is a member of `oneof`_ ``mode``. + """ + + class ReadWrite(proto.Message): + r"""Message type to initiate a read-write transaction. Currently + this transaction type has no options. + + """ + + class PartitionedDml(proto.Message): + r"""Message type to initiate a Partitioned DML transaction. + """ + + class ReadOnly(proto.Message): + r"""Message type to initiate a read-only transaction. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + strong (bool): + Read at a timestamp where all previously + committed transactions are visible. + This field is a member of `oneof`_ ``timestamp_bound``. + min_read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Executes all reads at a timestamp >= ``min_read_timestamp``. + + This is useful for requesting fresher data than some + previous read, or data that is fresh enough to observe the + effects of some previously committed transaction whose + timestamp is known. + + Note that this option can only be used in single-use + transactions. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + This field is a member of `oneof`_ ``timestamp_bound``. + max_staleness (google.protobuf.duration_pb2.Duration): + Read data at a timestamp >= ``NOW - max_staleness`` seconds. + Guarantees that all writes that have committed more than the + specified number of seconds ago are visible. 
Because Cloud + Spanner chooses the exact timestamp, this mode works even if + the client's local clock is substantially skewed from Cloud + Spanner commit timestamps. + + Useful for reading the freshest data available at a nearby + replica, while bounding the possible staleness if the local + replica has fallen behind. + + Note that this option can only be used in single-use + transactions. + This field is a member of `oneof`_ ``timestamp_bound``. + read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Executes all reads at the given timestamp. Unlike other + modes, reads at a specific timestamp are repeatable; the + same read at the same timestamp always returns the same + data. If the timestamp is in the future, the read will block + until the specified timestamp, modulo the read's deadline. + + Useful for large scale consistent reads such as mapreduces, + or for coordinating many reads against a consistent snapshot + of the data. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + This field is a member of `oneof`_ ``timestamp_bound``. + exact_staleness (google.protobuf.duration_pb2.Duration): + Executes all reads at a timestamp that is + ``exact_staleness`` old. The timestamp is chosen soon after + the read is started. + + Guarantees that all writes that have committed more than the + specified number of seconds ago are visible. Because Cloud + Spanner chooses the exact timestamp, this mode works even if + the client's local clock is substantially skewed from Cloud + Spanner commit timestamps. + + Useful for reading at nearby replicas without the + distributed timestamp negotiation overhead of + ``max_staleness``. + This field is a member of `oneof`_ ``timestamp_bound``. + return_read_timestamp (bool): + If true, the Cloud Spanner-selected read timestamp is + included in the [Transaction][google.spanner.v1.Transaction] + message that describes the transaction. 
+ """ + + strong = proto.Field( + proto.BOOL, + number=1, + oneof='timestamp_bound', + ) + min_read_timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof='timestamp_bound', + message=timestamp_pb2.Timestamp, + ) + max_staleness = proto.Field( + proto.MESSAGE, + number=3, + oneof='timestamp_bound', + message=duration_pb2.Duration, + ) + read_timestamp = proto.Field( + proto.MESSAGE, + number=4, + oneof='timestamp_bound', + message=timestamp_pb2.Timestamp, + ) + exact_staleness = proto.Field( + proto.MESSAGE, + number=5, + oneof='timestamp_bound', + message=duration_pb2.Duration, + ) + return_read_timestamp = proto.Field( + proto.BOOL, + number=6, + ) + + read_write = proto.Field( + proto.MESSAGE, + number=1, + oneof='mode', + message=ReadWrite, + ) + partitioned_dml = proto.Field( + proto.MESSAGE, + number=3, + oneof='mode', + message=PartitionedDml, + ) + read_only = proto.Field( + proto.MESSAGE, + number=2, + oneof='mode', + message=ReadOnly, + ) + + +class Transaction(proto.Message): + r"""A transaction. + + Attributes: + id (bytes): + ``id`` may be used to identify the transaction in subsequent + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], + [Commit][google.spanner.v1.Spanner.Commit], or + [Rollback][google.spanner.v1.Spanner.Rollback] calls. + + Single-use read-only transactions do not have IDs, because + single-use transactions do not support multiple requests. + read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + For snapshot read-only transactions, the read timestamp + chosen for the transaction. Not returned by default: see + [TransactionOptions.ReadOnly.return_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.return_read_timestamp]. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. 
+ """ + + id = proto.Field( + proto.BYTES, + number=1, + ) + read_timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class TransactionSelector(proto.Message): + r"""This message is used to select the transaction in which a + [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] call runs. + + See [TransactionOptions][google.spanner.v1.TransactionOptions] for + more information about transactions. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + single_use (google.cloud.spanner_v1.types.TransactionOptions): + Execute the read or SQL query in a temporary + transaction. This is the most efficient way to + execute a transaction that consists of a single + SQL query. + This field is a member of `oneof`_ ``selector``. + id (bytes): + Execute the read or SQL query in a + previously-started transaction. + This field is a member of `oneof`_ ``selector``. + begin (google.cloud.spanner_v1.types.TransactionOptions): + Begin a new transaction and execute this read or SQL query + in it. The transaction ID of the new transaction is returned + in + [ResultSetMetadata.transaction][google.spanner.v1.ResultSetMetadata.transaction], + which is a [Transaction][google.spanner.v1.Transaction]. + This field is a member of `oneof`_ ``selector``. 
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto  # type: ignore


# Register this module's messages/enums with the proto-plus runtime; string
# references below (e.g. message='Type') are resolved lazily against this
# manifest, which also allows the self-referential 'Type' field.
__protobuf__ = proto.module(
    package='google.spanner.v1',
    manifest={
        'TypeCode',
        'Type',
        'StructType',
    },
)


class TypeCode(proto.Enum):
    r"""``TypeCode`` is used as part of [Type][google.spanner.v1.Type] to
    indicate the type of a Cloud Spanner value.

    Each legal value of a type can be encoded to or decoded from a JSON
    value; all Cloud Spanner values can be ``null`` regardless of type,
    and ``null``\ s are always encoded as a JSON ``null``.
    """
    # Enum numbers mirror google/spanner/v1/type.proto and must not change.
    TYPE_CODE_UNSPECIFIED = 0
    BOOL = 1
    INT64 = 2
    FLOAT64 = 3
    TIMESTAMP = 4
    DATE = 5
    STRING = 6
    BYTES = 7
    ARRAY = 8
    STRUCT = 9
    NUMERIC = 10
    JSON = 11


class Type(proto.Message):
    r"""``Type`` indicates the type of a Cloud Spanner value, as might be
    stored in a table cell or returned from an SQL query.

    Attributes:
        code (google.cloud.spanner_v1.types.TypeCode):
            Required. The [TypeCode][google.spanner.v1.TypeCode] for
            this type.
        array_element_type (google.cloud.spanner_v1.types.Type):
            If ``code`` == ``ARRAY``, the element type of the array.
        struct_type (google.cloud.spanner_v1.types.StructType):
            If ``code`` == ``STRUCT``, type information for the
            struct's fields.
    """

    # Field numbers mirror the .proto definition exactly.
    code = proto.Field(proto.ENUM, number=1, enum='TypeCode')
    array_element_type = proto.Field(proto.MESSAGE, number=2, message='Type')
    struct_type = proto.Field(proto.MESSAGE, number=3, message='StructType')


class StructType(proto.Message):
    r"""``StructType`` defines the fields of a
    [STRUCT][google.spanner.v1.TypeCode.STRUCT] type.

    Attributes:
        fields (Sequence[google.cloud.spanner_v1.types.StructType.Field]):
            The ordered list of fields making up this struct. Order is
            significant: struct values are represented as lists whose
            order matches the order of fields here, which in turn
            matches the column order of a read request or the
            ``SELECT`` clause of a query.
    """

    class Field(proto.Message):
        r"""Message representing a single field of a struct.

        Attributes:
            name (str):
                The field name. For reads, the column name; for SQL
                queries, the column alias or column name. May be empty
                (e.g. ``"SELECT UPPER(ColName)"``), and a result can
                contain multiple fields with the same name.
            type_ (google.cloud.spanner_v1.types.Type):
                The type of the field.
        """

        name = proto.Field(proto.STRING, number=1)
        # Trailing underscore avoids shadowing the builtin ``type``.
        type_ = proto.Field(proto.MESSAGE, number=2, message='Type')

    fields = proto.RepeatedField(proto.MESSAGE, number=1, message=Field)


__all__ = tuple(sorted(__protobuf__.manifest))
#
import os
import pathlib
import shutil
import subprocess
import sys


import nox  # type: ignore

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")


# Sessions run when `nox` is invoked with no -s/--session flag.
# FIX 1: the supported knob is ``nox.options.sessions``; the previous
#        ``nox.sessions = [...]`` was a plain module-attribute write that nox
#        ignores, so *every* session (including update_lower_bounds) ran.
# FIX 2: the list was missing a comma after "check_lower_bounds", so implicit
#        string concatenation produced the bogus entry
#        "check_lower_boundsdocs" and silently dropped both sessions.
nox.options.sessions = [
    "unit",
    "cover",
    "mypy",
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
]

@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10'])
def unit(session):
    """Run the unit test suite under each supported Python."""

    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
    session.install('-e', '.')

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/spanner_v1/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        # Allow `nox -s unit -- <subpath>` to narrow the test selection.
        os.path.join('tests', 'unit', ''.join(session.posargs))
    )


@nox.session(python='3.9')
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=100")

    session.run("coverage", "erase")
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition."""
    matched: List[Any] = []
    unmatched: List[Any] = []

    for item in iterator:
        (matched if predicate(item) else unmatched).append(item)

    # Returns trueList, falseList
    return matched, unmatched


class spannerCallTransformer(cst.CSTTransformer):
    """libcst transformer that un-flattens GAPIC method calls into ``request=``."""

    # Control-plane parameters every method also accepts, kept as keywords.
    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
    # Flattened (pre-"request") parameter order for each API method.
    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
        'batch_create_sessions': ('database', 'session_count', 'session_template', ),
        'begin_transaction': ('session', 'options', 'request_options', ),
        'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'request_options', ),
        'create_session': ('database', 'session', ),
        'delete_session': ('name', ),
        'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', ),
        'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ),
        'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ),
        'get_session': ('name', ),
        'list_sessions': ('database', 'page_size', 'page_token', 'filter', ),
        'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ),
        'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ),
        'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ),
        'rollback': ('session', 'transaction_id', ),
        'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a recognized API call's flattened args into one request dict."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the flattened parameter count must be the
        # positionally-passed control parameters; convert them to keywords.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(
            cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
            for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)
        )

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)
            ]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=spannerCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    # Lazily walk the tree for Python sources only.
    py_files = (
        pathlib.Path(os.path.join(root, name))
        for root, _, files in os.walk(in_dir)
        for name in files
        if os.path.splitext(name)[1] == ".py"
    )

    for src_path in py_files:
        with open(src_path, 'r') as handle:
            source = handle.read()

        # Parse the code and insert method call fixes.
        new_module = cst.parse_module(source).visit(transformer)

        # Create the path and directory structure for the new file.
        dst_path = out_dir.joinpath(src_path.relative_to(in_dir))
        dst_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(dst_path, 'w') as handle:
            handle.write(new_module.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the spanner client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)

    # Validate preconditions before touching the filesystem; -1 matches the
    # original tool's exit status.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
+# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-spanner', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.19.4', + ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/spanner/v1/tests/__init__.py b/owl-bot-staging/spanner/v1/tests/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner/v1/tests/unit/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py new file mode 100644 index 0000000000..489b9aef0d --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py @@ -0,0 +1,4046 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.spanner_v1.services.spanner import SpannerAsyncClient +from google.cloud.spanner_v1.services.spanner import SpannerClient +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.services.spanner import transports +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import keys +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SpannerClient._get_default_mtls_endpoint(None) is None + assert SpannerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + SpannerClient, + SpannerAsyncClient, +]) +def test_spanner_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'spanner.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SpannerGrpcTransport, "grpc"), + (transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_spanner_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, 
always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + SpannerClient, + SpannerAsyncClient, +]) +def test_spanner_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'spanner.googleapis.com:443' + + +def test_spanner_client_get_transport_class(): + transport = SpannerClient.get_transport_class() + available_transports = [ + transports.SpannerGrpcTransport, + ] + assert transport in available_transports + + transport = SpannerClient.get_transport_class("grpc") + assert transport == transports.SpannerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(SpannerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerAsyncClient)) +def test_spanner_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc", "true"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (SpannerClient, transports.SpannerGrpcTransport, "grpc", "false"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) 
+@mock.patch.object(SpannerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_spanner_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_spanner_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_spanner_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_spanner_client_client_options_from_dict(): + with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = SpannerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_create_session(transport: str = 'grpc', request_type=spanner.CreateSessionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session( + name='name_value', + ) + response = client.create_session(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CreateSessionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + + +def test_create_session_from_dict(): + test_create_session(request_type=dict) + + +def test_create_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + client.create_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CreateSessionRequest() + + +@pytest.mark.asyncio +async def test_create_session_async(transport: str = 'grpc_asyncio', request_type=spanner.CreateSessionRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( + name='name_value', + )) + response = await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CreateSessionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_session_async_from_dict(): + await test_create_session_async(request_type=dict) + + +def test_create_session_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CreateSessionRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value = spanner.Session() + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_session_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CreateSessionRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) + await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +def test_create_session_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_session( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + + +def test_create_session_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_session( + spanner.CreateSessionRequest(), + database='database_value', + ) + + +@pytest.mark.asyncio +async def test_create_session_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_session( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + + +@pytest.mark.asyncio +async def test_create_session_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_session( + spanner.CreateSessionRequest(), + database='database_value', + ) + + +def test_batch_create_sessions(transport: str = 'grpc', request_type=spanner.BatchCreateSessionsRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.BatchCreateSessionsResponse( + ) + response = client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchCreateSessionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.BatchCreateSessionsResponse) + + +def test_batch_create_sessions_from_dict(): + test_batch_create_sessions(request_type=dict) + + +def test_batch_create_sessions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + client.batch_create_sessions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchCreateSessionsRequest() + + +@pytest.mark.asyncio +async def test_batch_create_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchCreateSessionsRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse( + )) + response = await client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchCreateSessionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.BatchCreateSessionsResponse) + + +@pytest.mark.asyncio +async def test_batch_create_sessions_async_from_dict(): + await test_batch_create_sessions_async(request_type=dict) + + +def test_batch_create_sessions_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = spanner.BatchCreateSessionsRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value = spanner.BatchCreateSessionsResponse() + client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_create_sessions_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchCreateSessionsRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) + await client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +def test_batch_create_sessions_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.BatchCreateSessionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_create_sessions( + database='database_value', + session_count=1420, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + assert args[0].session_count == 1420 + + +def test_batch_create_sessions_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_create_sessions( + spanner.BatchCreateSessionsRequest(), + database='database_value', + session_count=1420, + ) + + +@pytest.mark.asyncio +async def test_batch_create_sessions_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.BatchCreateSessionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_create_sessions( + database='database_value', + session_count=1420, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + assert args[0].session_count == 1420 + + +@pytest.mark.asyncio +async def test_batch_create_sessions_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_create_sessions( + spanner.BatchCreateSessionsRequest(), + database='database_value', + session_count=1420, + ) + + +def test_get_session(transport: str = 'grpc', request_type=spanner.GetSessionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session( + name='name_value', + ) + response = client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.GetSessionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + + +def test_get_session_from_dict(): + test_get_session(request_type=dict) + + +def test_get_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + client.get_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.GetSessionRequest() + + +@pytest.mark.asyncio +async def test_get_session_async(transport: str = 'grpc_asyncio', request_type=spanner.GetSessionRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( + name='name_value', + )) + response = await client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.GetSessionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_session_async_from_dict(): + await test_get_session_async(request_type=dict) + + +def test_get_session_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = spanner.GetSessionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + call.return_value = spanner.Session() + client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_session_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.GetSessionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) + await client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_session_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = spanner.Session() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_session( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_session_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_session( + spanner.GetSessionRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_session_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_session( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_session_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_session( + spanner.GetSessionRequest(), + name='name_value', + ) + + +def test_list_sessions(transport: str = 'grpc', request_type=spanner.ListSessionsRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.ListSessionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ListSessionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSessionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_sessions_from_dict(): + test_list_sessions(request_type=dict) + + +def test_list_sessions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + client.list_sessions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ListSessionsRequest() + + +@pytest.mark.asyncio +async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.ListSessionsRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ListSessionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSessionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_sessions_async_from_dict(): + await test_list_sessions_async(request_type=dict) + + +def test_list_sessions_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ListSessionsRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value = spanner.ListSessionsResponse() + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_sessions_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ListSessionsRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse()) + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +def test_list_sessions_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.ListSessionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_sessions( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + + +def test_list_sessions_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sessions( + spanner.ListSessionsRequest(), + database='database_value', + ) + + +@pytest.mark.asyncio +async def test_list_sessions_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.ListSessionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_sessions( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + + +@pytest.mark.asyncio +async def test_list_sessions_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_sessions( + spanner.ListSessionsRequest(), + database='database_value', + ) + + +def test_list_sessions_pager(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('database', ''), + )), + ) + pager = client.list_sessions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, spanner.Session) + for i in results) + +def test_list_sessions_pages(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sessions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_sessions_async_pager(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sessions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner.Session) + for i in responses) + +@pytest.mark.asyncio +async def test_list_sessions_async_pages(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_sessions(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_delete_session(transport: str = 'grpc', request_type=spanner.DeleteSessionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.DeleteSessionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_session_from_dict(): + test_delete_session(request_type=dict) + + +def test_delete_session_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + client.delete_session() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.DeleteSessionRequest() + + +@pytest.mark.asyncio +async def test_delete_session_async(transport: str = 'grpc_asyncio', request_type=spanner.DeleteSessionRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.DeleteSessionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_session_async_from_dict(): + await test_delete_session_async(request_type=dict) + + +def test_delete_session_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = spanner.DeleteSessionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + call.return_value = None + client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_session_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.DeleteSessionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_session_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_session( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_session_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_session( + spanner.DeleteSessionRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_session_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_session( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_session_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_session( + spanner.DeleteSessionRequest(), + name='name_value', + ) + + +def test_execute_sql(transport: str = 'grpc', request_type=spanner.ExecuteSqlRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = result_set.ResultSet( + ) + response = client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, result_set.ResultSet) + + +def test_execute_sql_from_dict(): + test_execute_sql(request_type=dict) + + +def test_execute_sql_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + client.execute_sql() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest() + + +@pytest.mark.asyncio +async def test_execute_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( + )) + response = await client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, result_set.ResultSet) + + +@pytest.mark.asyncio +async def test_execute_sql_async_from_dict(): + await test_execute_sql_async(request_type=dict) + + +def test_execute_sql_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + call.return_value = result_set.ResultSet() + client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_execute_sql_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) + await client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_execute_streaming_sql(transport: str = 'grpc', request_type=spanner.ExecuteSqlRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([result_set.PartialResultSet()]) + response = client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest() + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, result_set.PartialResultSet) + + +def test_execute_streaming_sql_from_dict(): + test_execute_streaming_sql(request_type=dict) + + +def test_execute_streaming_sql_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + client.execute_streaming_sql() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest() + + +@pytest.mark.asyncio +async def test_execute_streaming_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + response = await client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest() + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, result_set.PartialResultSet) + + +@pytest.mark.asyncio +async def test_execute_streaming_sql_async_from_dict(): + await test_execute_streaming_sql_async(request_type=dict) + + +def test_execute_streaming_sql_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_execute_streaming_sql_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = spanner.ExecuteSqlRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_execute_batch_dml(transport: str = 'grpc', request_type=spanner.ExecuteBatchDmlRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.ExecuteBatchDmlResponse( + ) + response = client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteBatchDmlRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner.ExecuteBatchDmlResponse) + + +def test_execute_batch_dml_from_dict(): + test_execute_batch_dml(request_type=dict) + + +def test_execute_batch_dml_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + client.execute_batch_dml() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteBatchDmlRequest() + + +@pytest.mark.asyncio +async def test_execute_batch_dml_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteBatchDmlRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse( + )) + response = await client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteBatchDmlRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner.ExecuteBatchDmlResponse) + + +@pytest.mark.asyncio +async def test_execute_batch_dml_async_from_dict(): + await test_execute_batch_dml_async(request_type=dict) + + +def test_execute_batch_dml_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteBatchDmlRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + call.return_value = spanner.ExecuteBatchDmlResponse() + client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_execute_batch_dml_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteBatchDmlRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse()) + await client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_read(transport: str = 'grpc', request_type=spanner.ReadRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = result_set.ResultSet( + ) + response = client.read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, result_set.ResultSet) + + +def test_read_from_dict(): + test_read(request_type=dict) + + +def test_read_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.read), + '__call__') as call: + client.read() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest() + + +@pytest.mark.asyncio +async def test_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( + )) + response = await client.read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, result_set.ResultSet) + + +@pytest.mark.asyncio +async def test_read_async_from_dict(): + await test_read_async(request_type=dict) + + +def test_read_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + call.return_value = result_set.ResultSet() + client.read(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_read_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) + await client.read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_streaming_read(transport: str = 'grpc', request_type=spanner.ReadRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([result_set.PartialResultSet()]) + response = client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest() + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, result_set.PartialResultSet) + + +def test_streaming_read_from_dict(): + test_streaming_read(request_type=dict) + + +def test_streaming_read_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + client.streaming_read() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest() + + +@pytest.mark.asyncio +async def test_streaming_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + response = await client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest() + + # Establish that the response is the type that we expect. 
+ message = await response.read() + assert isinstance(message, result_set.PartialResultSet) + + +@pytest.mark.asyncio +async def test_streaming_read_async_from_dict(): + await test_streaming_read_async(request_type=dict) + + +def test_streaming_read_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_streaming_read_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_begin_transaction(transport: str = 'grpc', request_type=spanner.BeginTransactionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = transaction.Transaction( + id=b'id_blob', + ) + response = client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BeginTransactionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transaction.Transaction) + assert response.id == b'id_blob' + + +def test_begin_transaction_from_dict(): + test_begin_transaction(request_type=dict) + + +def test_begin_transaction_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + client.begin_transaction() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BeginTransactionRequest() + + +@pytest.mark.asyncio +async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=spanner.BeginTransactionRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction( + id=b'id_blob', + )) + response = await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BeginTransactionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transaction.Transaction) + assert response.id == b'id_blob' + + +@pytest.mark.asyncio +async def test_begin_transaction_async_from_dict(): + await test_begin_transaction_async(request_type=dict) + + +def test_begin_transaction_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BeginTransactionRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + call.return_value = transaction.Transaction() + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_begin_transaction_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BeginTransactionRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction()) + await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_begin_transaction_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = transaction.Transaction() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.begin_transaction( + session='session_value', + options=transaction.TransactionOptions(read_write=None), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].session == 'session_value' + assert args[0].options == transaction.TransactionOptions(read_write=None) + + +def test_begin_transaction_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.begin_transaction( + spanner.BeginTransactionRequest(), + session='session_value', + options=transaction.TransactionOptions(read_write=None), + ) + + +@pytest.mark.asyncio +async def test_begin_transaction_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = transaction.Transaction() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.begin_transaction( + session='session_value', + options=transaction.TransactionOptions(read_write=None), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].session == 'session_value' + assert args[0].options == transaction.TransactionOptions(read_write=None) + + +@pytest.mark.asyncio +async def test_begin_transaction_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.begin_transaction( + spanner.BeginTransactionRequest(), + session='session_value', + options=transaction.TransactionOptions(read_write=None), + ) + + +def test_commit(transport: str = 'grpc', request_type=spanner.CommitRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = commit_response.CommitResponse( + ) + response = client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CommitRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, commit_response.CommitResponse) + + +def test_commit_from_dict(): + test_commit(request_type=dict) + + +def test_commit_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + client.commit() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CommitRequest() + + +@pytest.mark.asyncio +async def test_commit_async(transport: str = 'grpc_asyncio', request_type=spanner.CommitRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse( + )) + response = await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CommitRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, commit_response.CommitResponse) + + +@pytest.mark.asyncio +async def test_commit_async_from_dict(): + await test_commit_async(request_type=dict) + + +def test_commit_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CommitRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value = commit_response.CommitResponse() + client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_commit_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CommitRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) + await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_commit_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = commit_response.CommitResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.commit( + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=None), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].session == 'session_value' + assert args[0].mutations == [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] + assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=None) + + +def test_commit_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit( + spanner.CommitRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=None), + ) + + +@pytest.mark.asyncio +async def test_commit_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = commit_response.CommitResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.commit( + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=None), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].session == 'session_value' + assert args[0].mutations == [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] + assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=None) + + +@pytest.mark.asyncio +async def test_commit_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.commit( + spanner.CommitRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=None), + ) + + +def test_rollback(transport: str = 'grpc', request_type=spanner.RollbackRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.rollback(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.RollbackRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_rollback_from_dict(): + test_rollback(request_type=dict) + + +def test_rollback_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + client.rollback() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.RollbackRequest() + + +@pytest.mark.asyncio +async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=spanner.RollbackRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.RollbackRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_rollback_async_from_dict(): + await test_rollback_async(request_type=dict) + + +def test_rollback_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.RollbackRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = None + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rollback_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.RollbackRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_rollback_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rollback( + session='session_value', + transaction_id=b'transaction_id_blob', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].session == 'session_value' + assert args[0].transaction_id == b'transaction_id_blob' + + +def test_rollback_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback( + spanner.RollbackRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + ) + + +@pytest.mark.asyncio +async def test_rollback_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.rollback( + session='session_value', + transaction_id=b'transaction_id_blob', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].session == 'session_value' + assert args[0].transaction_id == b'transaction_id_blob' + + +@pytest.mark.asyncio +async def test_rollback_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rollback( + spanner.RollbackRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + ) + + +def test_partition_query(transport: str = 'grpc', request_type=spanner.PartitionQueryRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.PartitionResponse( + ) + response = client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.PartitionQueryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner.PartitionResponse) + + +def test_partition_query_from_dict(): + test_partition_query(request_type=dict) + + +def test_partition_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + client.partition_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.PartitionQueryRequest() + + +@pytest.mark.asyncio +async def test_partition_query_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionQueryRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( + )) + response = await client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.PartitionQueryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner.PartitionResponse) + + +@pytest.mark.asyncio +async def test_partition_query_async_from_dict(): + await test_partition_query_async(request_type=dict) + + +def test_partition_query_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.PartitionQueryRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = spanner.PartitionResponse() + client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_partition_query_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.PartitionQueryRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) + await client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_partition_read(transport: str = 'grpc', request_type=spanner.PartitionReadRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.PartitionResponse( + ) + response = client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.PartitionReadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.PartitionResponse) + + +def test_partition_read_from_dict(): + test_partition_read(request_type=dict) + + +def test_partition_read_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + client.partition_read() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.PartitionReadRequest() + + +@pytest.mark.asyncio +async def test_partition_read_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionReadRequest): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( + )) + response = await client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.PartitionReadRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.PartitionResponse) + + +@pytest.mark.asyncio +async def test_partition_read_async_from_dict(): + await test_partition_read_async(request_type=dict) + + +def test_partition_read_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.PartitionReadRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + call.return_value = spanner.PartitionResponse() + client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_partition_read_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.PartitionReadRequest() + + request.session = 'session/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) + await client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session/value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SpannerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SpannerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.SpannerGrpcTransport, + transports.SpannerGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SpannerGrpcTransport, + ) + +def test_spanner_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SpannerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_spanner_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.SpannerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_session', + 'batch_create_sessions', + 'get_session', + 'list_sessions', + 'delete_session', + 'execute_sql', + 'execute_streaming_sql', + 'execute_batch_dml', + 'read', + 'streaming_read', + 'begin_transaction', + 'commit', + 'rollback', + 'partition_query', + 'partition_read', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + +def test_spanner_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SpannerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + 
load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', +), + quota_project_id="octopus", + ) + + +def test_spanner_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SpannerTransport() + adc.assert_called_once() + + +def test_spanner_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SpannerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SpannerGrpcTransport, + transports.SpannerGrpcAsyncIOTransport, + ], +) +def test_spanner_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.data',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SpannerGrpcTransport, grpc_helpers), + (transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_spanner_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', +), + scopes=["1", "2"], + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) +def test_spanner_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_spanner_host_no_port(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), + ) + assert client.transport._host == 'spanner.googleapis.com:443' + + +def test_spanner_host_with_port(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), + ) + assert client.transport._host == 'spanner.googleapis.com:8000' + +def test_spanner_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.SpannerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_spanner_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SpannerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) +def test_spanner_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + 
scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) +def test_spanner_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_database_path(): + project = "squid" + instance = "clam" + database = "whelk" + expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + actual = SpannerClient.database_path(project, instance, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + 
"project": "octopus", + "instance": "oyster", + "database": "nudibranch", + } + path = SpannerClient.database_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_database_path(path) + assert expected == actual + +def test_session_path(): + project = "cuttlefish" + instance = "mussel" + database = "winkle" + session = "nautilus" + expected = "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) + actual = SpannerClient.session_path(project, instance, database, session) + assert expected == actual + + +def test_parse_session_path(): + expected = { + "project": "scallop", + "instance": "abalone", + "database": "squid", + "session": "clam", + } + path = SpannerClient.session_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_session_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SpannerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SpannerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = SpannerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SpannerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SpannerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SpannerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SpannerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = SpannerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SpannerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SpannerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SpannerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SpannerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: + transport_class = SpannerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
diff --git a/owl-bot-staging/spanner_admin_database/v1/.coveragerc b/owl-bot-staging/spanner_admin_database/v1/.coveragerc
new file mode 100644
index 0000000000..689cd57616
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/.coveragerc
@@ -0,0 +1,17 @@
+[run]
+branch = True
+
+[report]
+show_missing = True
+omit =
+  google/cloud/spanner_admin_database/__init__.py
+exclude_lines =
+  # Re-enable the standard pragma
+  pragma: NO COVER
+  # Ignore debug-only repr
+  def __repr__
+  # Ignore pkg_resources exceptions.
+  # This is added at the module level as a safeguard for if someone
+  # generates the code and tries to run it without pip installing. This
+  # makes it virtually impossible to test properly.
+  except pkg_resources.DistributionNotFound
diff --git a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in
new file mode 100644
index 0000000000..550e337e01
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in
@@ -0,0 +1,2 @@
+recursive-include google/cloud/spanner_admin_database *.py
+recursive-include google/cloud/spanner_admin_database_v1 *.py
diff --git a/owl-bot-staging/spanner_admin_database/v1/README.rst b/owl-bot-staging/spanner_admin_database/v1/README.rst
new file mode 100644
index 0000000000..12e0ba69de
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/README.rst
@@ -0,0 +1,49 @@
+Python Client for Google Cloud Spanner Admin Database API
+=========================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Spanner Admin Database API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py
new file mode 100644
index 0000000000..5fb348c41a
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# google-cloud-spanner-admin-database documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.6.3"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-spanner-admin-database" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-spanner-admin-database-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-spanner-admin-database.tex", + u"google-cloud-spanner-admin-database Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + master_doc, + "google-cloud-spanner-admin-database", + u"Google Cloud Spanner Admin Database Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-spanner-admin-database", + u"google-cloud-spanner-admin-database Documentation", + author, + "google-cloud-spanner-admin-database", + "GAPIC library for Google Cloud Spanner Admin Database API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst new file mode 100644 index 0000000000..e2947e7442 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + spanner_admin_database_v1/services + spanner_admin_database_v1/types diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst new file mode 100644 index 0000000000..bd6aab00e4 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst @@ -0,0 +1,10 @@ +DatabaseAdmin +------------------------------- + +.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin + :members: + :inherited-members: + +.. 
automodule:: google.cloud.spanner_admin_database_v1.services.database_admin.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services.rst new file mode 100644 index 0000000000..55e57d8dc0 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner Admin Database v1 API +======================================================= +.. toctree:: + :maxdepth: 2 + + database_admin diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types.rst new file mode 100644 index 0000000000..95e1d7f88b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Spanner Admin Database v1 API +==================================================== + +.. automodule:: google.cloud.spanner_admin_database_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py new file mode 100644 index 0000000000..f93f373840 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.spanner_admin_database_v1.services.database_admin.client import DatabaseAdminClient +from google.cloud.spanner_admin_database_v1.services.database_admin.async_client import DatabaseAdminAsyncClient + +from google.cloud.spanner_admin_database_v1.types.backup import Backup +from google.cloud.spanner_admin_database_v1.types.backup import BackupInfo +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupEncryptionConfig +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupMetadata +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import DeleteBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import GetBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsRequest +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsResponse +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsRequest +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsResponse +from google.cloud.spanner_admin_database_v1.types.backup import UpdateBackupRequest +from google.cloud.spanner_admin_database_v1.types.common import EncryptionConfig +from google.cloud.spanner_admin_database_v1.types.common import EncryptionInfo +from google.cloud.spanner_admin_database_v1.types.common import OperationProgress +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import 
CreateDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import Database +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DropDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import OptimizeRestoredDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseEncryptionConfig +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreInfo +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreSourceType + +__all__ = ('DatabaseAdminClient', + 'DatabaseAdminAsyncClient', + 'Backup', + 'BackupInfo', + 
'CreateBackupEncryptionConfig', + 'CreateBackupMetadata', + 'CreateBackupRequest', + 'DeleteBackupRequest', + 'GetBackupRequest', + 'ListBackupOperationsRequest', + 'ListBackupOperationsResponse', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'UpdateBackupRequest', + 'EncryptionConfig', + 'EncryptionInfo', + 'OperationProgress', + 'CreateDatabaseMetadata', + 'CreateDatabaseRequest', + 'Database', + 'DropDatabaseRequest', + 'GetDatabaseDdlRequest', + 'GetDatabaseDdlResponse', + 'GetDatabaseRequest', + 'ListDatabaseOperationsRequest', + 'ListDatabaseOperationsResponse', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'OptimizeRestoredDatabaseMetadata', + 'RestoreDatabaseEncryptionConfig', + 'RestoreDatabaseMetadata', + 'RestoreDatabaseRequest', + 'RestoreInfo', + 'UpdateDatabaseDdlMetadata', + 'UpdateDatabaseDdlRequest', + 'RestoreSourceType', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed new file mode 100644 index 0000000000..29f334aad6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py new file mode 100644 index 0000000000..1aff0f8660 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.database_admin import DatabaseAdminClient +from .services.database_admin import DatabaseAdminAsyncClient + +from .types.backup import Backup +from .types.backup import BackupInfo +from .types.backup import CreateBackupEncryptionConfig +from .types.backup import CreateBackupMetadata +from .types.backup import CreateBackupRequest +from .types.backup import DeleteBackupRequest +from .types.backup import GetBackupRequest +from .types.backup import ListBackupOperationsRequest +from .types.backup import ListBackupOperationsResponse +from .types.backup import ListBackupsRequest +from .types.backup import ListBackupsResponse +from .types.backup import UpdateBackupRequest +from .types.common import EncryptionConfig +from .types.common import EncryptionInfo +from .types.common import OperationProgress +from .types.spanner_database_admin import CreateDatabaseMetadata +from .types.spanner_database_admin import CreateDatabaseRequest +from .types.spanner_database_admin import Database +from .types.spanner_database_admin import DropDatabaseRequest +from .types.spanner_database_admin import GetDatabaseDdlRequest +from .types.spanner_database_admin import GetDatabaseDdlResponse +from .types.spanner_database_admin import GetDatabaseRequest +from .types.spanner_database_admin import ListDatabaseOperationsRequest +from .types.spanner_database_admin import ListDatabaseOperationsResponse +from .types.spanner_database_admin import ListDatabasesRequest +from .types.spanner_database_admin import ListDatabasesResponse +from .types.spanner_database_admin import 
OptimizeRestoredDatabaseMetadata +from .types.spanner_database_admin import RestoreDatabaseEncryptionConfig +from .types.spanner_database_admin import RestoreDatabaseMetadata +from .types.spanner_database_admin import RestoreDatabaseRequest +from .types.spanner_database_admin import RestoreInfo +from .types.spanner_database_admin import UpdateDatabaseDdlMetadata +from .types.spanner_database_admin import UpdateDatabaseDdlRequest +from .types.spanner_database_admin import RestoreSourceType + +__all__ = ( + 'DatabaseAdminAsyncClient', +'Backup', +'BackupInfo', +'CreateBackupEncryptionConfig', +'CreateBackupMetadata', +'CreateBackupRequest', +'CreateDatabaseMetadata', +'CreateDatabaseRequest', +'Database', +'DatabaseAdminClient', +'DeleteBackupRequest', +'DropDatabaseRequest', +'EncryptionConfig', +'EncryptionInfo', +'GetBackupRequest', +'GetDatabaseDdlRequest', +'GetDatabaseDdlResponse', +'GetDatabaseRequest', +'ListBackupOperationsRequest', +'ListBackupOperationsResponse', +'ListBackupsRequest', +'ListBackupsResponse', +'ListDatabaseOperationsRequest', +'ListDatabaseOperationsResponse', +'ListDatabasesRequest', +'ListDatabasesResponse', +'OperationProgress', +'OptimizeRestoredDatabaseMetadata', +'RestoreDatabaseEncryptionConfig', +'RestoreDatabaseMetadata', +'RestoreDatabaseRequest', +'RestoreInfo', +'RestoreSourceType', +'UpdateBackupRequest', +'UpdateDatabaseDdlMetadata', +'UpdateDatabaseDdlRequest', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json new file mode 100644 index 0000000000..1460097dc3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json @@ -0,0 +1,193 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": 
"google.cloud.spanner_admin_database_v1", + "protoPackage": "google.spanner.admin.database.v1", + "schema": "1.0", + "services": { + "DatabaseAdmin": { + "clients": { + "grpc": { + "libraryClient": "DatabaseAdminClient", + "rpcs": { + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DropDatabase": { + "methods": [ + "drop_database" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetDatabaseDdl": { + "methods": [ + "get_database_ddl" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListBackupOperations": { + "methods": [ + "list_backup_operations" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabaseOperations": { + "methods": [ + "list_database_operations" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateDatabaseDdl": { + "methods": [ + "update_database_ddl" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DatabaseAdminAsyncClient", + "rpcs": { + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DropDatabase": { + "methods": [ + "drop_database" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetDatabaseDdl": { + "methods": [ + "get_database_ddl" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListBackupOperations": { + "methods": [ + 
"list_backup_operations" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabaseOperations": { + "methods": [ + "list_database_operations" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateDatabaseDdl": { + "methods": [ + "update_database_ddl" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed new file mode 100644 index 0000000000..29f334aad6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py new file mode 100644 index 0000000000..4de65971c2 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py new file mode 100644 index 0000000000..eb34609dc2 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DatabaseAdminClient +from .async_client import DatabaseAdminAsyncClient + +__all__ = ( + 'DatabaseAdminClient', + 'DatabaseAdminAsyncClient', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py new file mode 100644 index 0000000000..4a66aa471b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -0,0 +1,1968 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +OptionalRetry = Union[retries.Retry, object] + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_database_v1.services.database_admin import pagers +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import common +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport +from .client 
import DatabaseAdminClient + + +class DatabaseAdminAsyncClient: + """Cloud Spanner Database Admin API + The Cloud Spanner Database Admin API can be used to create, + drop, and list databases. It also enables updating the schema of + pre-existing databases. It can be also used to create, delete + and list backups for a database and to restore from an existing + backup. + """ + + _client: DatabaseAdminClient + + DEFAULT_ENDPOINT = DatabaseAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + + backup_path = staticmethod(DatabaseAdminClient.backup_path) + parse_backup_path = staticmethod(DatabaseAdminClient.parse_backup_path) + crypto_key_path = staticmethod(DatabaseAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(DatabaseAdminClient.parse_crypto_key_path) + crypto_key_version_path = staticmethod(DatabaseAdminClient.crypto_key_version_path) + parse_crypto_key_version_path = staticmethod(DatabaseAdminClient.parse_crypto_key_version_path) + database_path = staticmethod(DatabaseAdminClient.database_path) + parse_database_path = staticmethod(DatabaseAdminClient.parse_database_path) + instance_path = staticmethod(DatabaseAdminClient.instance_path) + parse_instance_path = staticmethod(DatabaseAdminClient.parse_instance_path) + common_billing_account_path = staticmethod(DatabaseAdminClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DatabaseAdminClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DatabaseAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(DatabaseAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(DatabaseAdminClient.common_organization_path) + parse_common_organization_path = staticmethod(DatabaseAdminClient.parse_common_organization_path) + common_project_path = staticmethod(DatabaseAdminClient.common_project_path) + parse_common_project_path = 
staticmethod(DatabaseAdminClient.parse_common_project_path) + common_location_path = staticmethod(DatabaseAdminClient.common_location_path) + parse_common_location_path = staticmethod(DatabaseAdminClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminAsyncClient: The constructed client. + """ + return DatabaseAdminClient.from_service_account_info.__func__(DatabaseAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminAsyncClient: The constructed client. + """ + return DatabaseAdminClient.from_service_account_file.__func__(DatabaseAdminAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DatabaseAdminTransport: + """Returns the transport used by the client instance. + + Returns: + DatabaseAdminTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(DatabaseAdminClient).get_transport_class, type(DatabaseAdminClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, DatabaseAdminTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the database admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DatabaseAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = DatabaseAdminClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_databases(self, + request: Union[spanner_database_admin.ListDatabasesRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabasesAsyncPager: + r"""Lists Cloud Spanner databases. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): + The request object. The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + parent (:class:`str`): + Required. The instance whose databases should be listed. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager: + The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_database_admin.ListDatabasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_databases, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatabasesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_database(self, + request: Union[spanner_database_admin.CreateDatabaseRequest, dict] = None, + *, + parent: str = None, + create_statement: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cloud Spanner database and starts to prepare it + for serving. 
The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]): + The request object. The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + parent (:class:`str`): + Required. The name of the instance that will serve the + new database. Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + create_statement (:class:`str`): + Required. A ``CREATE DATABASE`` statement, which + specifies the ID of the new database. The database ID + must conform to the regular expression + ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 + characters in length. If the database ID is a reserved + word or if it contains a hyphen, the database ID must be + enclosed in backticks (:literal:`\``). + + This corresponds to the ``create_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, create_statement]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_database_admin.CreateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if create_statement is not None: + request.create_statement = create_statement + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_database, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.CreateDatabaseMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_database(self, + request: Union[spanner_database_admin.GetDatabaseRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner_database_admin.Database: + r"""Gets the state of a Cloud Spanner database. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]): + The request object. The request for + [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. + name (:class:`str`): + Required. The name of the requested database. Values are + of the form + ``projects//instances//databases/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.types.Database: + A Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_database_admin.GetDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_database, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_database_ddl(self, + request: Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict] = None, + *, + database: str = None, + statements: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The + returned [long-running operation][google.longrunning.Operation] + will have a name of the format + ``/operations/`` and can be used to + track execution of the schema change(s). The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): + The request object. Enqueues the given DDL statements to + be applied, in order but not necessarily all at once, to + the database schema at some point (or points) in the + future. The server checks that the statements are + executable (syntactically valid, name tables that exist, + etc.) 
before enqueueing them, but they may still fail + upon + later execution (e.g., if a statement from another batch + of statements is applied first and it conflicts in some + way, or if there is some data-related problem like a + `NULL` value in a column to which `NOT NULL` would be + added). If a statement fails, all subsequent statements + in the batch are automatically cancelled. + Each batch of statements is assigned a name which can be + used with the + [Operations][google.longrunning.Operations] API to + monitor progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + database (:class:`str`): + Required. The database to update. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + statements (:class:`Sequence[str]`): + Required. DDL statements to be + applied to the database. + + This corresponds to the ``statements`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database, statements]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_database_admin.UpdateDatabaseDdlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if statements: + request.statements.extend(statements) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_database_ddl, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, + ) + + # Done; return the response. 
+ return response + + async def drop_database(self, + request: Union[spanner_database_admin.DropDatabaseRequest, dict] = None, + *, + database: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + database (:class:`str`): + Required. The database to be dropped. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_database_admin.DropDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.drop_database, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_database_ddl(self, + request: Union[spanner_database_admin.GetDatabaseDdlRequest, dict] = None, + *, + database: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner_database_admin.GetDatabaseDdlResponse: + r"""Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): + The request object. The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + database (:class:`str`): + Required. The database whose schema we wish to get. + Values are of the form + ``projects//instances//databases/`` + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: + The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_database_admin.GetDatabaseDdlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_database_ddl, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, + *, + resource: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on a database or backup resource. 
+ Replaces any existing policy. + + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, + *, + resource: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a database or backup + resource. Returns an empty policy if a database or backup exists + but does not have a policy set. + + Authorization requires ``spanner.databases.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that the caller has on the specified + database or backup resource. + + Attempting this RPC on a non-existent Cloud Spanner database + will result in a NOT_FOUND error if the user has + ``spanner.databases.list`` permission on the containing Cloud + Spanner instance. 
Otherwise returns an empty set of permissions. + Calling this method on a backup that does not exist will result + in a NOT_FOUND error if the user has ``spanner.backups.list`` + permission on the containing instance. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backup(self, + request: Union[gsad_backup.CreateBackupRequest, dict] = None, + *, + parent: str = None, + backup: gsad_backup.Backup = None, + backup_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts creating a new Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track creation of the backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + creation and delete the backup. There can be only one pending + backup creation per database. Backup creation of different + databases can run concurrently. 
+ + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]): + The request object. The request for + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + parent (:class:`str`): + Required. The name of the instance in which the backup + will be created. This must be the same instance that + contains the database the backup will be created from. + The backup will be stored in the location(s) specified + in the instance configuration of this instance. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): + Required. The backup to create. + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. The id of the backup to be created. The + ``backup_id`` appended to ``parent`` forms the full + backup name of the form + ``projects//instances//backups/``. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = gsad_backup.CreateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_backup, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gsad_backup.Backup, + metadata_type=gsad_backup.CreateBackupMetadata, + ) + + # Done; return the response. + return response + + async def get_backup(self, + request: Union[backup.GetBackupRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup.Backup: + r"""Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]): + The request object. The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + name (:class:`str`): + Required. 
Name of the backup. Values are of the form + ``projects//instances//backups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = backup.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_backup, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_backup(self, + request: Union[gsad_backup.UpdateBackupRequest, dict] = None, + *, + backup: gsad_backup.Backup = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsad_backup.Backup: + r"""Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): + The request object. The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only + supported for the following fields: + + - ``backup.expire_time``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be + updated. This mask is relative to the Backup resource, + not to the request message. The field mask must always + be specified; this prevents any future fields from being + erased accidentally by clients that do not know about + them. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = gsad_backup.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_backup, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup.name", request.backup.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup(self, + request: Union[backup.DeleteBackupRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. 
+ + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): + The request object. The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + name (:class:`str`): + Required. Name of the backup to delete. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = backup.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_backup, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_backups(self, + request: Union[backup.ListBackupsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): + The request object. The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + parent (:class:`str`): + Required. The instance to list backups from. Values are + of the form ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager: + The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = backup.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backups, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def restore_database(self, + request: Union[spanner_database_admin.RestoreDatabaseRequest, dict] = None, + *, + parent: str = None, + database_id: str = None, + backup: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new database by restoring from a completed backup. 
The + new database must be in the same project and in an instance with + the same instance configuration as the instance containing the + backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the + format + ``projects//instances//databases//operations/``, + and can be used to track the progress of the operation, and to + cancel it. The [metadata][google.longrunning.Operation.metadata] + field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + [Database][google.spanner.admin.database.v1.Database], if + successful. Cancelling the returned operation will stop the + restore and delete the database. There can be only one database + being restored into an instance at a time. Once the restore + operation completes, a new restore operation can be initiated, + without waiting for the optimize operation associated with the + first restore to complete. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]): + The request object. The request for + [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + parent (:class:`str`): + Required. The name of the instance in which to create + the restored database. This instance must be in the same + project and have the same instance configuration as the + instance containing the source backup. Values are of the + form ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (:class:`str`): + Required. The id of the database to create and restore + to. This database must not already exist. The + ``database_id`` appended to ``parent`` forms the full + database name of the form + ``projects//instances//databases/``. 
+ + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`str`): + Name of the backup from which to restore. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, database_id, backup]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_database_admin.RestoreDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database_id is not None: + request.database_id = database_id + if backup is not None: + request.backup = backup + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.restore_database, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.RestoreDatabaseMetadata, + ) + + # Done; return the response. + return response + + async def list_database_operations(self, + request: Union[spanner_database_admin.ListDatabaseOperationsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabaseOperationsAsyncPager: + r"""Lists database + [longrunning-operations][google.longrunning.Operation]. A + database operation has a name of the form + ``projects//instances//databases//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]): + The request object. The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + parent (:class:`str`): + Required. The instance of the database operations. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager: + The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_database_admin.ListDatabaseOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_database_operations, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListDatabaseOperationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_operations(self, + request: Union[backup.ListBackupOperationsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupOperationsAsyncPager: + r"""Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]): + The request object. The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + parent (:class:`str`): + Required. The instance of the backup operations. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager: + The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = backup.ListBackupOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_backup_operations, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupOperationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-spanner-admin-database", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "DatabaseAdminAsyncClient", +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py new file mode 100644 index 0000000000..1430fa5d4c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -0,0 +1,2116 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +OptionalRetry = Union[retries.Retry, object] + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_database_v1.services.database_admin import pagers +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import common +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DatabaseAdminGrpcTransport +from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport + + +class DatabaseAdminClientMeta(type): + """Metaclass for the DatabaseAdmin client. 
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DatabaseAdminTransport]]
+    _transport_registry["grpc"] = DatabaseAdminGrpcTransport
+    _transport_registry["grpc_asyncio"] = DatabaseAdminGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: str = None,
+        ) -> Type[DatabaseAdminTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DatabaseAdminClient(metaclass=DatabaseAdminClientMeta):
+    """Cloud Spanner Database Admin API
+    The Cloud Spanner Database Admin API can be used to create,
+    drop, and list databases. It also enables updating the schema of
+    pre-existing databases. It can be also used to create, delete
+    and list backups for a database and to restore from an existing
+    backup.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "spanner.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DatabaseAdminTransport: + """Returns the transport used by the client instance. + + Returns: + DatabaseAdminTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def backup_path(project: str,instance: str,backup: str,) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str,str]: + """Parses a backup path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/backups/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str,str]: + """Parses a crypto_key path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_version_path(project: str,location: str,key_ring: str,crypto_key: str,crypto_key_version: str,) -> str: + """Returns a fully-qualified crypto_key_version string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) + + @staticmethod + def parse_crypto_key_version_path(path: str) -> Dict[str,str]: + """Parses a crypto_key_version path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)/cryptoKeyVersions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def database_path(project: str,instance: str,database: 
str,) -> str: + """Returns a fully-qualified database string.""" + return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + + @staticmethod + def parse_database_path(path: str) -> Dict[str,str]: + """Parses a database path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def instance_path(project: str,instance: str,) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str,str]: + """Parses a instance path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def 
parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DatabaseAdminTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the database admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DatabaseAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. 
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DatabaseAdminTransport): + # transport is a DatabaseAdminTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + def list_databases(self, + request: Union[spanner_database_admin.ListDatabasesRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabasesPager: + r"""Lists Cloud Spanner databases. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): + The request object. The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + parent (str): + Required. The instance whose databases should be listed. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager: + The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_database_admin.ListDatabasesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_database_admin.ListDatabasesRequest): + request = spanner_database_admin.ListDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatabasesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_database(self, + request: Union[spanner_database_admin.CreateDatabaseRequest, dict] = None, + *, + parent: str = None, + create_statement: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Cloud Spanner database and starts to prepare it + for serving. 
The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]): + The request object. The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + parent (str): + Required. The name of the instance that will serve the + new database. Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + create_statement (str): + Required. A ``CREATE DATABASE`` statement, which + specifies the ID of the new database. The database ID + must conform to the regular expression + ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 + characters in length. If the database ID is a reserved + word or if it contains a hyphen, the database ID must be + enclosed in backticks (:literal:`\``). + + This corresponds to the ``create_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, create_statement]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_database_admin.CreateDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_database_admin.CreateDatabaseRequest): + request = spanner_database_admin.CreateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if create_statement is not None: + request.create_statement = create_statement + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.CreateDatabaseMetadata, + ) + + # Done; return the response. 
+ return response + + def get_database(self, + request: Union[spanner_database_admin.GetDatabaseRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner_database_admin.Database: + r"""Gets the state of a Cloud Spanner database. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]): + The request object. The request for + [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. + name (str): + Required. The name of the requested database. Values are + of the form + ``projects//instances//databases/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.types.Database: + A Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_database_admin.GetDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, spanner_database_admin.GetDatabaseRequest): + request = spanner_database_admin.GetDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_database_ddl(self, + request: Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict] = None, + *, + database: str = None, + statements: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The + returned [long-running operation][google.longrunning.Operation] + will have a name of the format + ``/operations/`` and can be used to + track execution of the schema change(s). The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): + The request object. Enqueues the given DDL statements to + be applied, in order but not necessarily all at once, to + the database schema at some point (or points) in the + future. 
The server checks that the statements are + executable (syntactically valid, name tables that exist, + etc.) before enqueueing them, but they may still fail + upon + later execution (e.g., if a statement from another batch + of statements is applied first and it conflicts in some + way, or if there is some data-related problem like a + `NULL` value in a column to which `NOT NULL` would be + added). If a statement fails, all subsequent statements + in the batch are automatically cancelled. + Each batch of statements is assigned a name which can be + used with the + [Operations][google.longrunning.Operations] API to + monitor progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + database (str): + Required. The database to update. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + statements (Sequence[str]): + Required. DDL statements to be + applied to the database. + + This corresponds to the ``statements`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database, statements]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_database_admin.UpdateDatabaseDdlRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): + request = spanner_database_admin.UpdateDatabaseDdlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if statements is not None: + request.statements = statements + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, + ) + + # Done; return the response. 
+ return response + + def drop_database(self, + request: Union[spanner_database_admin.DropDatabaseRequest, dict] = None, + *, + database: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + database (str): + Required. The database to be dropped. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_database_admin.DropDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_database_admin.DropDatabaseRequest): + request = spanner_database_admin.DropDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.drop_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_database_ddl(self, + request: Union[spanner_database_admin.GetDatabaseDdlRequest, dict] = None, + *, + database: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner_database_admin.GetDatabaseDdlResponse: + r"""Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): + The request object. The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + database (str): + Required. The database whose schema we wish to get. + Values are of the form + ``projects//instances//databases/`` + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: + The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([database]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_database_admin.GetDatabaseDdlRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): + request = spanner_database_admin.GetDatabaseDdlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, + *, + resource: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on a database or backup resource. 
+ Replaces any existing policy. + + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for `SetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, + *, + resource: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a database or backup + resource. Returns an empty policy if a database or backup exists + but does not have a policy set. + + Authorization requires ``spanner.databases.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for `GetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that the caller has on the specified + database or backup resource. + + Attempting this RPC on a non-existent Cloud Spanner database + will result in a NOT_FOUND error if the user has + ``spanner.databases.list`` permission on the containing Cloud + Spanner instance. Otherwise returns an empty set of permissions. 
+ Calling this method on a backup that does not exist will result
+ in a NOT_FOUND error if the user has ``spanner.backups.list``
+ permission on the containing instance.
+
+ Args:
+ request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
+ The request object. Request message for
+ `TestIamPermissions` method.
+ resource (str):
+ REQUIRED: The resource for which the
+ policy detail is being requested. See
+ the operation documentation for the
+ appropriate value for this field.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ permissions (Sequence[str]):
+ The set of permissions to check for the ``resource``.
+ Permissions with wildcards (such as '*' or 'storage.*')
+ are not allowed. For more information see `IAM
+ Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+ This corresponds to the ``permissions`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([resource, permissions])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ if isinstance(request, dict):
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + if resource is not None: + request.resource = resource + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_backup(self, + request: Union[gsad_backup.CreateBackupRequest, dict] = None, + *, + parent: str = None, + backup: gsad_backup.Backup = None, + backup_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts creating a new Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track creation of the backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + creation and delete the backup. There can be only one pending + backup creation per database. Backup creation of different + databases can run concurrently. 
+ + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]): + The request object. The request for + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + parent (str): + Required. The name of the instance in which the backup + will be created. This must be the same instance that + contains the database the backup will be created from. + The backup will be stored in the location(s) specified + in the instance configuration of this instance. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to create. + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (str): + Required. The id of the backup to be created. The + ``backup_id`` appended to ``parent`` forms the full + backup name of the form + ``projects//instances//backups/``. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, backup, backup_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a gsad_backup.CreateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsad_backup.CreateBackupRequest): + request = gsad_backup.CreateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gsad_backup.Backup, + metadata_type=gsad_backup.CreateBackupMetadata, + ) + + # Done; return the response. + return response + + def get_backup(self, + request: Union[backup.GetBackupRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backup.Backup: + r"""Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. 
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]):
+ The request object. The request for
+ [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup].
+ name (str):
+ Required. Name of the backup. Values are of the form
+ ``projects/<project>/instances/<instance>/backups/<backup>``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.types.Backup:
+ A backup of a Cloud Spanner database.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a backup.GetBackupRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, backup.GetBackupRequest):
+ request = backup.GetBackupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_backup]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup(self, + request: Union[gsad_backup.UpdateBackupRequest, dict] = None, + *, + backup: gsad_backup.Backup = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsad_backup.Backup: + r"""Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): + The request object. The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only + supported for the following fields: + + - ``backup.expire_time``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be + updated. This mask is relative to the Backup resource, + not to the request message. The field mask must always + be specified; this prevents any future fields from being + erased accidentally by clients that do not know about + them. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a gsad_backup.UpdateBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsad_backup.UpdateBackupRequest): + request = gsad_backup.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup.name", request.backup.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_backup(self, + request: Union[backup.DeleteBackupRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): + The request object. The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + name (str): + Required. Name of the backup to delete. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a backup.DeleteBackupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup.DeleteBackupRequest): + request = backup.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_backups(self, + request: Union[backup.ListBackupsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): + The request object. The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + parent (str): + Required. The instance to list backups from. Values are + of the form ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager: + The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a backup.ListBackupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup.ListBackupsRequest): + request = backup.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def restore_database(self, + request: Union[spanner_database_admin.RestoreDatabaseRequest, dict] = None, + *, + parent: str = None, + database_id: str = None, + backup: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a new database by restoring from a completed backup. The + new database must be in the same project and in an instance with + the same instance configuration as the instance containing the + backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the + format + ``projects//instances//databases//operations/``, + and can be used to track the progress of the operation, and to + cancel it. The [metadata][google.longrunning.Operation.metadata] + field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + [Database][google.spanner.admin.database.v1.Database], if + successful. Cancelling the returned operation will stop the + restore and delete the database. There can be only one database + being restored into an instance at a time. Once the restore + operation completes, a new restore operation can be initiated, + without waiting for the optimize operation associated with the + first restore to complete. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]): + The request object. The request for + [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + parent (str): + Required. The name of the instance in which to create + the restored database. This instance must be in the same + project and have the same instance configuration as the + instance containing the source backup. Values are of the + form ``projects//instances/``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (str): + Required. The id of the database to create and restore + to. This database must not already exist. The + ``database_id`` appended to ``parent`` forms the full + database name of the form + ``projects//instances//databases/``. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (str): + Name of the backup from which to restore. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, database_id, backup]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_database_admin.RestoreDatabaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest): + request = spanner_database_admin.RestoreDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database_id is not None: + request.database_id = database_id + if backup is not None: + request.backup = backup + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.RestoreDatabaseMetadata, + ) + + # Done; return the response. + return response + + def list_database_operations(self, + request: Union[spanner_database_admin.ListDatabaseOperationsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatabaseOperationsPager: + r"""Lists database + [longrunning-operations][google.longrunning.Operation]. A + database operation has a name of the form + ``projects//instances//databases//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. 
+ + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]): + The request object. The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + parent (str): + Required. The instance of the database operations. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager: + The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_database_admin.ListDatabaseOperationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest): + request = spanner_database_admin.ListDatabaseOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_database_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatabaseOperationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_operations(self, + request: Union[backup.ListBackupOperationsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupOperationsPager: + r"""Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]): + The request object. 
The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + parent (str): + Required. The instance of the backup operations. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager: + The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a backup.ListBackupOperationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, backup.ListBackupOperationsRequest): + request = backup.ListBackupOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_backup_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupOperationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-spanner-admin-database", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "DatabaseAdminClient", +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py new file mode 100644 index 0000000000..cc8cb77f48 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.longrunning import operations_pb2 # type: ignore + + +class ListDatabasesPager: + """A pager for iterating through ``list_databases`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_database_admin.ListDatabasesResponse], + request: spanner_database_admin.ListDatabasesRequest, + response: spanner_database_admin.ListDatabasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_database_admin.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_database_admin.Database]: + for page in self.pages: + yield from page.databases + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabasesAsyncPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_database_admin.ListDatabasesResponse]], + request: spanner_database_admin.ListDatabasesRequest, + response: spanner_database_admin.ListDatabasesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = spanner_database_admin.ListDatabasesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[spanner_database_admin.Database]: + async def async_generator(): + async for page in self.pages: + for response in page.databases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., backup.ListBackupsResponse], + request: backup.ListBackupsRequest, + response: backup.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[backup.ListBackupsResponse]], + request: backup.ListBackupsRequest, + response: backup.ListBackupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup.ListBackupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[backup.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseOperationsPager: + """A pager for iterating through ``list_database_operations`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabaseOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_database_admin.ListDatabaseOperationsResponse], + request: spanner_database_admin.ListDatabaseOperationsRequest, + response: spanner_database_admin.ListDatabaseOperationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_database_admin.ListDatabaseOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseOperationsAsyncPager: + """A pager for iterating through ``list_database_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabaseOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]], + request: spanner_database_admin.ListDatabaseOperationsRequest, + response: spanner_database_admin.ListDatabaseOperationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupOperationsPager: + """A pager for iterating through ``list_backup_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., backup.ListBackupOperationsResponse], + request: backup.ListBackupOperationsRequest, + response: backup.ListBackupOperationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backup.ListBackupOperationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup.ListBackupOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupOperationsAsyncPager: + """A pager for iterating through ``list_backup_operations`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[backup.ListBackupOperationsResponse]], + request: backup.ListBackupOperationsRequest, + response: backup.ListBackupOperationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backup.ListBackupOperationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup.ListBackupOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py new file mode 100644 index 0000000000..9a0047f713 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DatabaseAdminTransport +from .grpc import DatabaseAdminGrpcTransport +from .grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]] +_transport_registry['grpc'] = DatabaseAdminGrpcTransport +_transport_registry['grpc_asyncio'] = DatabaseAdminGrpcAsyncIOTransport + +__all__ = ( + 'DatabaseAdminTransport', + 'DatabaseAdminGrpcTransport', + 'DatabaseAdminGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py new file mode 100644 index 0000000000..5ef2051c71 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -0,0 +1,467 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-spanner-admin-database', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class DatabaseAdminTransport(abc.ABC): + """Abstract transport class for DatabaseAdmin.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_databases: gapic_v1.method.wrap_method( + self.list_databases, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_database: gapic_v1.method.wrap_method( + self.create_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database: gapic_v1.method.wrap_method( + self.get_database, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database_ddl: gapic_v1.method.wrap_method( + self.update_database_ddl, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.drop_database: gapic_v1.method.wrap_method( + self.drop_database, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database_ddl: gapic_v1.method.wrap_method( + self.get_database_ddl, + 
default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.create_backup: gapic_v1.method.wrap_method( + self.create_backup, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + 
default_timeout=3600.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.restore_database: gapic_v1.method.wrap_method( + self.restore_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_operations: gapic_v1.method.wrap_method( + self.list_database_operations, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_operations: gapic_v1.method.wrap_method( + self.list_backup_operations, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + Union[ + spanner_database_admin.ListDatabasesResponse, + Awaitable[spanner_database_admin.ListDatabasesResponse] + ]]: + raise NotImplementedError() + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + Union[ + spanner_database_admin.Database, + Awaitable[spanner_database_admin.Database] + ]]: + raise NotImplementedError() + + @property + def update_database_ddl(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseDdlRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + Union[ + spanner_database_admin.GetDatabaseDdlResponse, + Awaitable[spanner_database_admin.GetDatabaseDdlResponse] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + 
[iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_backup(self) -> Callable[ + [gsad_backup.CreateBackupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + Union[ + backup.Backup, + Awaitable[backup.Backup] + ]]: + raise NotImplementedError() + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + Union[ + gsad_backup.Backup, + Awaitable[gsad_backup.Backup] + ]]: + raise NotImplementedError() + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + Union[ + backup.ListBackupsResponse, + Awaitable[backup.ListBackupsResponse] + ]]: + raise NotImplementedError() + + @property + def restore_database(self) -> Callable[ + [spanner_database_admin.RestoreDatabaseRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_database_operations(self) -> Callable[ + [spanner_database_admin.ListDatabaseOperationsRequest], + Union[ + spanner_database_admin.ListDatabaseOperationsResponse, + Awaitable[spanner_database_admin.ListDatabaseOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + Union[ + backup.ListBackupOperationsResponse, + Awaitable[backup.ListBackupOperationsResponse] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'DatabaseAdminTransport', +) diff --git 
a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py new file mode 100644 index 0000000000..bcb77aea14 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -0,0 +1,802 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO + + +class DatabaseAdminGrpcTransport(DatabaseAdminTransport): + """gRPC backend transport for DatabaseAdmin. + + Cloud Spanner Database Admin API + The Cloud Spanner Database Admin API can be used to create, + drop, and list databases. It also enables updating the schema of + pre-existing databases. It can be also used to create, delete + and list backups for a database and to restore from an existing + backup. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + spanner_database_admin.ListDatabasesResponse]: + r"""Return a callable for the list databases method over gRPC. + + Lists Cloud Spanner databases. 
+ + Returns: + Callable[[~.ListDatabasesRequest], + ~.ListDatabasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_databases' not in self._stubs: + self._stubs['list_databases'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', + request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, + ) + return self._stubs['list_databases'] + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + operations_pb2.Operation]: + r"""Return a callable for the create database method over gRPC. + + Creates a new Cloud Spanner database and starts to prepare it + for serving. The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + Returns: + Callable[[~.CreateDatabaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_database' not in self._stubs: + self._stubs['create_database'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', + request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_database'] + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + spanner_database_admin.Database]: + r"""Return a callable for the get database method over gRPC. + + Gets the state of a Cloud Spanner database. + + Returns: + Callable[[~.GetDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_database' not in self._stubs: + self._stubs['get_database'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', + request_serializer=spanner_database_admin.GetDatabaseRequest.serialize, + response_deserializer=spanner_database_admin.Database.deserialize, + ) + return self._stubs['get_database'] + + @property + def update_database_ddl(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseDdlRequest], + operations_pb2.Operation]: + r"""Return a callable for the update database ddl method over gRPC. + + Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The + returned [long-running operation][google.longrunning.Operation] + will have a name of the format + ``/operations/`` and can be used to + track execution of the schema change(s). The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. 
+ + Returns: + Callable[[~.UpdateDatabaseDdlRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_database_ddl' not in self._stubs: + self._stubs['update_database_ddl'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', + request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_database_ddl'] + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + empty_pb2.Empty]: + r"""Return a callable for the drop database method over gRPC. + + Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. + + Returns: + Callable[[~.DropDatabaseRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'drop_database' not in self._stubs: + self._stubs['drop_database'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', + request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['drop_database'] + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + spanner_database_admin.GetDatabaseDdlResponse]: + r"""Return a callable for the get database ddl method over gRPC. 
+ + Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. + + Returns: + Callable[[~.GetDatabaseDdlRequest], + ~.GetDatabaseDdlResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_database_ddl' not in self._stubs: + self._stubs['get_database_ddl'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', + request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, + response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, + ) + return self._stubs['get_database_ddl'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on a database or backup resource. + Replaces any existing policy. + + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a database or backup + resource. Returns an empty policy if a database or backup exists + but does not have a policy set. + + Authorization requires ``spanner.databases.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + database or backup resource. 
+ + Attempting this RPC on a non-existent Cloud Spanner database + will result in a NOT_FOUND error if the user has + ``spanner.databases.list`` permission on the containing Cloud + Spanner instance. Otherwise returns an empty set of permissions. + Calling this method on a backup that does not exist will result + in a NOT_FOUND error if the user has ``spanner.backups.list`` + permission on the containing instance. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def create_backup(self) -> Callable[ + [gsad_backup.CreateBackupRequest], + operations_pb2.Operation]: + r"""Return a callable for the create backup method over gRPC. + + Starts creating a new Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track creation of the backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + creation and delete the backup. 
There can be only one pending + backup creation per database. Backup creation of different + databases can run concurrently. + + Returns: + Callable[[~.CreateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_backup' not in self._stubs: + self._stubs['create_backup'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', + request_serializer=gsad_backup.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_backup'] + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + backup.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup' not in self._stubs: + self._stubs['get_backup'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', + request_serializer=backup.GetBackupRequest.serialize, + response_deserializer=backup.Backup.deserialize, + ) + return self._stubs['get_backup'] + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + gsad_backup.Backup]: + r"""Return a callable for the update backup method over gRPC. + + Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. 
+ + Returns: + Callable[[~.UpdateBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_backup' not in self._stubs: + self._stubs['update_backup'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', + request_serializer=gsad_backup.UpdateBackupRequest.serialize, + response_deserializer=gsad_backup.Backup.deserialize, + ) + return self._stubs['update_backup'] + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_backup' not in self._stubs: + self._stubs['delete_backup'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', + request_serializer=backup.DeleteBackupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup'] + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + backup.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. 
+ + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backups' not in self._stubs: + self._stubs['list_backups'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', + request_serializer=backup.ListBackupsRequest.serialize, + response_deserializer=backup.ListBackupsResponse.deserialize, + ) + return self._stubs['list_backups'] + + @property + def restore_database(self) -> Callable[ + [spanner_database_admin.RestoreDatabaseRequest], + operations_pb2.Operation]: + r"""Return a callable for the restore database method over gRPC. + + Create a new database by restoring from a completed backup. The + new database must be in the same project and in an instance with + the same instance configuration as the instance containing the + backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the + format + ``projects//instances//databases//operations/``, + and can be used to track the progress of the operation, and to + cancel it. The [metadata][google.longrunning.Operation.metadata] + field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + [Database][google.spanner.admin.database.v1.Database], if + successful. Cancelling the returned operation will stop the + restore and delete the database. There can be only one database + being restored into an instance at a time. Once the restore + operation completes, a new restore operation can be initiated, + without waiting for the optimize operation associated with the + first restore to complete. 
+ + Returns: + Callable[[~.RestoreDatabaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'restore_database' not in self._stubs: + self._stubs['restore_database'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', + request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restore_database'] + + @property + def list_database_operations(self) -> Callable[ + [spanner_database_admin.ListDatabaseOperationsRequest], + spanner_database_admin.ListDatabaseOperationsResponse]: + r"""Return a callable for the list database operations method over gRPC. + + Lists database + [longrunning-operations][google.longrunning.Operation]. A + database operation has a name of the form + ``projects//instances//databases//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. + + Returns: + Callable[[~.ListDatabaseOperationsRequest], + ~.ListDatabaseOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_database_operations' not in self._stubs: + self._stubs['list_database_operations'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', + request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, + ) + return self._stubs['list_database_operations'] + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + backup.ListBackupOperationsResponse]: + r"""Return a callable for the list backup operations method over gRPC. + + Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + Returns: + Callable[[~.ListBackupOperationsRequest], + ~.ListBackupOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_backup_operations' not in self._stubs: + self._stubs['list_backup_operations'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', + request_serializer=backup.ListBackupOperationsRequest.serialize, + response_deserializer=backup.ListBackupOperationsResponse.deserialize, + ) + return self._stubs['list_backup_operations'] + + def close(self): + self.grpc_channel.close() + +__all__ = ( + 'DatabaseAdminGrpcTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py new file mode 100644 index 0000000000..f34545f412 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -0,0 +1,806 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO +from .grpc import DatabaseAdminGrpcTransport + + +class DatabaseAdminGrpcAsyncIOTransport(DatabaseAdminTransport): + """gRPC AsyncIO backend transport for DatabaseAdmin. + + Cloud Spanner Database Admin API + The Cloud Spanner Database Admin API can be used to create, + drop, and list databases. It also enables updating the schema of + pre-existing databases. It can be also used to create, delete + and list backups for a database and to restore from an existing + backup. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + Awaitable[spanner_database_admin.ListDatabasesResponse]]: + r"""Return a callable for the list databases method over gRPC. + + Lists Cloud Spanner databases. + + Returns: + Callable[[~.ListDatabasesRequest], + Awaitable[~.ListDatabasesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_databases' not in self._stubs: + self._stubs['list_databases'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', + request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, + ) + return self._stubs['list_databases'] + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create database method over gRPC. + + Creates a new Cloud Spanner database and starts to prepare it + for serving. The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + Returns: + Callable[[~.CreateDatabaseRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_database' not in self._stubs: + self._stubs['create_database'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', + request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_database'] + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + Awaitable[spanner_database_admin.Database]]: + r"""Return a callable for the get database method over gRPC. + + Gets the state of a Cloud Spanner database. + + Returns: + Callable[[~.GetDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_database' not in self._stubs: + self._stubs['get_database'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', + request_serializer=spanner_database_admin.GetDatabaseRequest.serialize, + response_deserializer=spanner_database_admin.Database.deserialize, + ) + return self._stubs['get_database'] + + @property + def update_database_ddl(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseDdlRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update database ddl method over gRPC. + + Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The + returned [long-running operation][google.longrunning.Operation] + will have a name of the format + ``/operations/`` and can be used to + track execution of the schema change(s). The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. 
+ The operation has no response. + + Returns: + Callable[[~.UpdateDatabaseDdlRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_database_ddl' not in self._stubs: + self._stubs['update_database_ddl'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', + request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_database_ddl'] + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the drop database method over gRPC. + + Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. + + Returns: + Callable[[~.DropDatabaseRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'drop_database' not in self._stubs: + self._stubs['drop_database'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', + request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['drop_database'] + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + Awaitable[spanner_database_admin.GetDatabaseDdlResponse]]: + r"""Return a callable for the get database ddl method over gRPC. + + Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. + + Returns: + Callable[[~.GetDatabaseDdlRequest], + Awaitable[~.GetDatabaseDdlResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_database_ddl' not in self._stubs: + self._stubs['get_database_ddl'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', + request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, + response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, + ) + return self._stubs['get_database_ddl'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on a database or backup resource. + Replaces any existing policy. + + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. 
For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a database or backup + resource. Returns an empty policy if a database or backup exists + but does not have a policy set. + + Authorization requires ``spanner.databases.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + database or backup resource. + + Attempting this RPC on a non-existent Cloud Spanner database + will result in a NOT_FOUND error if the user has + ``spanner.databases.list`` permission on the containing Cloud + Spanner instance. Otherwise returns an empty set of permissions. + Calling this method on a backup that does not exist will result + in a NOT_FOUND error if the user has ``spanner.backups.list`` + permission on the containing instance. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def create_backup(self) -> Callable[ + [gsad_backup.CreateBackupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create backup method over gRPC. + + Starts creating a new Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track creation of the backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + creation and delete the backup. There can be only one pending + backup creation per database. Backup creation of different + databases can run concurrently. + + Returns: + Callable[[~.CreateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_backup' not in self._stubs: + self._stubs['create_backup'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', + request_serializer=gsad_backup.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_backup'] + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + Awaitable[backup.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup' not in self._stubs: + self._stubs['get_backup'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', + request_serializer=backup.GetBackupRequest.serialize, + response_deserializer=backup.Backup.deserialize, + ) + return self._stubs['get_backup'] + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + Awaitable[gsad_backup.Backup]]: + r"""Return a callable for the update backup method over gRPC. + + Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_backup' not in self._stubs: + self._stubs['update_backup'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', + request_serializer=gsad_backup.UpdateBackupRequest.serialize, + response_deserializer=gsad_backup.Backup.deserialize, + ) + return self._stubs['update_backup'] + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_backup' not in self._stubs: + self._stubs['delete_backup'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', + request_serializer=backup.DeleteBackupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup'] + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + Awaitable[backup.ListBackupsResponse]]: + r"""Return a callable for the list backups method over gRPC. + + Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_backups' not in self._stubs: + self._stubs['list_backups'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', + request_serializer=backup.ListBackupsRequest.serialize, + response_deserializer=backup.ListBackupsResponse.deserialize, + ) + return self._stubs['list_backups'] + + @property + def restore_database(self) -> Callable[ + [spanner_database_admin.RestoreDatabaseRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restore database method over gRPC. + + Create a new database by restoring from a completed backup. The + new database must be in the same project and in an instance with + the same instance configuration as the instance containing the + backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the + format + ``projects//instances//databases//operations/``, + and can be used to track the progress of the operation, and to + cancel it. The [metadata][google.longrunning.Operation.metadata] + field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + [Database][google.spanner.admin.database.v1.Database], if + successful. Cancelling the returned operation will stop the + restore and delete the database. There can be only one database + being restored into an instance at a time. Once the restore + operation completes, a new restore operation can be initiated, + without waiting for the optimize operation associated with the + first restore to complete. + + Returns: + Callable[[~.RestoreDatabaseRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'restore_database' not in self._stubs: + self._stubs['restore_database'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', + request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restore_database'] + + @property + def list_database_operations(self) -> Callable[ + [spanner_database_admin.ListDatabaseOperationsRequest], + Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]]: + r"""Return a callable for the list database operations method over gRPC. + + Lists database + [longrunning-operations][google.longrunning.Operation]. A + database operation has a name of the form + ``projects//instances//databases//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. + + Returns: + Callable[[~.ListDatabaseOperationsRequest], + Awaitable[~.ListDatabaseOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_database_operations' not in self._stubs: + self._stubs['list_database_operations'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', + request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, + ) + return self._stubs['list_database_operations'] + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + Awaitable[backup.ListBackupOperationsResponse]]: + r"""Return a callable for the list backup operations method over gRPC. + + Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + Returns: + Callable[[~.ListBackupOperationsRequest], + Awaitable[~.ListBackupOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_backup_operations' not in self._stubs: + self._stubs['list_backup_operations'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', + request_serializer=backup.ListBackupOperationsRequest.serialize, + response_deserializer=backup.ListBackupOperationsResponse.deserialize, + ) + return self._stubs['list_backup_operations'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'DatabaseAdminGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py new file mode 100644 index 0000000000..3fa15e4383 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .backup import ( + Backup, + BackupInfo, + CreateBackupEncryptionConfig, + CreateBackupMetadata, + CreateBackupRequest, + DeleteBackupRequest, + GetBackupRequest, + ListBackupOperationsRequest, + ListBackupOperationsResponse, + ListBackupsRequest, + ListBackupsResponse, + UpdateBackupRequest, +) +from .common import ( + EncryptionConfig, + EncryptionInfo, + OperationProgress, +) +from .spanner_database_admin import ( + CreateDatabaseMetadata, + CreateDatabaseRequest, + Database, + DropDatabaseRequest, + GetDatabaseDdlRequest, + GetDatabaseDdlResponse, + GetDatabaseRequest, + ListDatabaseOperationsRequest, + ListDatabaseOperationsResponse, + ListDatabasesRequest, + ListDatabasesResponse, + OptimizeRestoredDatabaseMetadata, + RestoreDatabaseEncryptionConfig, + RestoreDatabaseMetadata, + RestoreDatabaseRequest, + RestoreInfo, + UpdateDatabaseDdlMetadata, + UpdateDatabaseDdlRequest, + RestoreSourceType, +) + +__all__ = ( + 'Backup', + 'BackupInfo', + 'CreateBackupEncryptionConfig', + 'CreateBackupMetadata', + 'CreateBackupRequest', + 'DeleteBackupRequest', + 'GetBackupRequest', + 'ListBackupOperationsRequest', + 'ListBackupOperationsResponse', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'UpdateBackupRequest', + 'EncryptionConfig', + 'EncryptionInfo', + 'OperationProgress', + 'CreateDatabaseMetadata', + 'CreateDatabaseRequest', + 'Database', + 'DropDatabaseRequest', + 'GetDatabaseDdlRequest', + 'GetDatabaseDdlResponse', + 'GetDatabaseRequest', + 'ListDatabaseOperationsRequest', + 'ListDatabaseOperationsResponse', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'OptimizeRestoredDatabaseMetadata', + 'RestoreDatabaseEncryptionConfig', + 'RestoreDatabaseMetadata', + 'RestoreDatabaseRequest', + 'RestoreInfo', + 'UpdateDatabaseDdlMetadata', + 'UpdateDatabaseDdlRequest', + 'RestoreSourceType', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py 
b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py new file mode 100644 index 0000000000..0d2469d461 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py @@ -0,0 +1,638 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import common +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'Backup', + 'CreateBackupRequest', + 'CreateBackupMetadata', + 'UpdateBackupRequest', + 'GetBackupRequest', + 'DeleteBackupRequest', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'ListBackupOperationsRequest', + 'ListBackupOperationsResponse', + 'BackupInfo', + 'CreateBackupEncryptionConfig', + }, +) + + +class Backup(proto.Message): + r"""A backup of a Cloud Spanner database. + + Attributes: + database (str): + Required for the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. Name of the database from which this backup was + created. This needs to be in the same instance as the + backup. Values are of the form + ``projects//instances//databases/``. 
+ version_time (google.protobuf.timestamp_pb2.Timestamp): + The backup will contain an externally consistent copy of the + database at the timestamp specified by ``version_time``. If + ``version_time`` is not specified, the system will set + ``version_time`` to the ``create_time`` of the backup. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Required for the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. The expiration time of the backup, with + microseconds granularity that must be at least 6 hours and + at most 366 days from the time the CreateBackup request is + processed. Once the ``expire_time`` has passed, the backup + is eligible to be automatically deleted by Cloud Spanner to + free the resources used by the backup. + name (str): + Output only for the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. Required for the + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup] + operation. + + A globally unique identifier for the backup which cannot be + changed. Values are of the form + ``projects//instances//backups/[a-z][a-z0-9_\-]*[a-z0-9]`` + The final segment of the name must be between 2 and 60 + characters in length. + + The backup is stored in the location(s) specified in the + instance configuration of the instance containing the + backup, identified by the prefix of the backup name of the + form ``projects//instances/``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request is received. If the request does not specify + ``version_time``, the ``version_time`` of the backup will be + equivalent to the ``create_time``. + size_bytes (int): + Output only. Size of the backup in bytes. + state (google.cloud.spanner_admin_database_v1.types.Backup.State): + Output only. The current state of the backup. 
+ referencing_databases (Sequence[str]): + Output only. The names of the restored databases that + reference the backup. The database names are of the form + ``projects//instances//databases/``. + Referencing databases may exist in different instances. The + existence of any referencing database prevents the backup + from being deleted. When a restored database from the backup + enters the ``READY`` state, the reference to the backup is + removed. + encryption_info (google.cloud.spanner_admin_database_v1.types.EncryptionInfo): + Output only. The encryption information for + the backup. + """ + class State(proto.Enum): + r"""Indicates the current state of the backup.""" + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + database = proto.Field( + proto.STRING, + number=2, + ) + version_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + expire_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + name = proto.Field( + proto.STRING, + number=1, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + size_bytes = proto.Field( + proto.INT64, + number=5, + ) + state = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + referencing_databases = proto.RepeatedField( + proto.STRING, + number=7, + ) + encryption_info = proto.Field( + proto.MESSAGE, + number=8, + message=common.EncryptionInfo, + ) + + +class CreateBackupRequest(proto.Message): + r"""The request for + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + + Attributes: + parent (str): + Required. The name of the instance in which the backup will + be created. This must be the same instance that contains the + database the backup will be created from. The backup will be + stored in the location(s) specified in the instance + configuration of this instance. Values are of the form + ``projects//instances/``. + backup_id (str): + Required. 
The id of the backup to be created. The + ``backup_id`` appended to ``parent`` forms the full backup + name of the form + ``projects//instances//backups/``. + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to create. + encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): + Optional. The encryption configuration used to encrypt the + backup. If this field is not specified, the backup will use + the same encryption configuration as the database by + default, namely + [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] + = ``USE_DATABASE_ENCRYPTION``. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + backup_id = proto.Field( + proto.STRING, + number=2, + ) + backup = proto.Field( + proto.MESSAGE, + number=3, + message='Backup', + ) + encryption_config = proto.Field( + proto.MESSAGE, + number=4, + message='CreateBackupEncryptionConfig', + ) + + +class CreateBackupMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + + Attributes: + name (str): + The name of the backup being created. + database (str): + The name of the database the backup is + created from. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which cancellation of this operation was + received. + [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] + starts asynchronous cancellation on a long-running + operation. The server makes a best effort to cancel the + operation, but success is not guaranteed. 
Clients can use + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + or other methods to check whether the cancellation succeeded + or whether the operation completed despite cancellation. On + successful cancellation, the operation is not deleted; + instead, it becomes an operation with an + [Operation.error][google.longrunning.Operation.error] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + database = proto.Field( + proto.STRING, + number=2, + ) + progress = proto.Field( + proto.MESSAGE, + number=3, + message=common.OperationProgress, + ) + cancel_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateBackupRequest(proto.Message): + r"""The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + + Attributes: + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only supported + for the following fields: + + - ``backup.expire_time``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be updated. + This mask is relative to the Backup resource, not to the + request message. The field mask must always be specified; + this prevents any future fields from being erased + accidentally by clients that do not know about them. + """ + + backup = proto.Field( + proto.MESSAGE, + number=1, + message='Backup', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetBackupRequest(proto.Message): + r"""The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. 
+ + Attributes: + name (str): + Required. Name of the backup. Values are of the form + ``projects//instances//backups/``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupRequest(proto.Message): + r"""The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + + Attributes: + name (str): + Required. Name of the backup to delete. Values are of the + form + ``projects//instances//backups/``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupsRequest(proto.Message): + r"""The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Attributes: + parent (str): + Required. The instance to list backups from. Values are of + the form ``projects//instances/``. + filter (str): + An expression that filters the list of returned backups. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [Backup][google.spanner.admin.database.v1.Backup] are + eligible for filtering: + + - ``name`` + - ``database`` + - ``state`` + - ``create_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``expire_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``version_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``size_bytes`` + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic, but you can specify AND, OR, and + NOT logic explicitly. + + Here are a few examples: + + - ``name:Howl`` - The backup's name contains the string + "howl". + - ``database:prod`` - The database's name contains the + string "prod". 
+ - ``state:CREATING`` - The backup is pending creation. + - ``state:READY`` - The backup is fully created and ready + for use. + - ``(name:howl) AND (create_time < \"2018-03-28T14:50:00Z\")`` + - The backup name contains the string "howl" and + ``create_time`` of the backup is before + 2018-03-28T14:50:00Z. + - ``expire_time < \"2018-03-28T14:50:00Z\"`` - The backup + ``expire_time`` is before 2018-03-28T14:50:00Z. + - ``size_bytes > 10000000000`` - The backup's size is + greater than 10GB + page_size (int): + Number of backups to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupsResponse.next_page_token] + from a previous + [ListBackupsResponse][google.spanner.admin.database.v1.ListBackupsResponse] + to the same ``parent`` and with the same ``filter``. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupsResponse(proto.Message): + r"""The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Attributes: + backups (Sequence[google.cloud.spanner_admin_database_v1.types.Backup]): + The list of matching backups. Backups returned are ordered + by ``create_time`` in descending order, starting from the + most recent ``create_time``. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups] + call to fetch more of the matching backups. 
+ """ + + @property + def raw_page(self): + return self + + backups = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Backup', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListBackupOperationsRequest(proto.Message): + r"""The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Attributes: + parent (str): + Required. The instance of the backup operations. Values are + of the form ``projects//instances/``. + filter (str): + An expression that filters the list of returned backup + operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [operation][google.longrunning.Operation] are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. For example, + the type string for + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] + is + ``type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata``. + - ``metadata.`` - any field in metadata.value. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic, but you can specify AND, OR, and + NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. 
+ - ``metadata.database:prod`` - The database the backup was + taken from has a name containing the string "prod". + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` + ``(metadata.name:howl) AND`` + ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Returns operations where: + + - The operation's metadata type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + - The backup name contains the string "howl". + - The operation started before 2018-03-28T14:50:00Z. + - The operation resulted in an error. + page_size (int): + Number of operations to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupOperationsResponse.next_page_token] + from a previous + [ListBackupOperationsResponse][google.spanner.admin.database.v1.ListBackupOperationsResponse] + to the same ``parent`` and with the same ``filter``. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupOperationsResponse(proto.Message): + r"""The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Attributes: + operations (Sequence[google.longrunning.operations_pb2.Operation]): + The list of matching backup [long-running + operations][google.longrunning.Operation]. Each operation's + name will be prefixed by the backup's name and the + operation's + [metadata][google.longrunning.Operation.metadata] will be of + type + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. 
+ Operations returned include those that are pending or have + completed/failed/canceled within the last 7 days. Operations + returned are ordered by + ``operation.metadata.value.progress.start_time`` in + descending order starting from the most recently started + operation. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations] + call to fetch more of the matching metadata. + """ + + @property + def raw_page(self): + return self + + operations = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class BackupInfo(proto.Message): + r"""Information about a backup. + + Attributes: + backup (str): + Name of the backup. + version_time (google.protobuf.timestamp_pb2.Timestamp): + The backup contains an externally consistent copy of + ``source_database`` at the timestamp specified by + ``version_time``. If the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request did not specify ``version_time``, the + ``version_time`` of the backup is equivalent to the + ``create_time``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request was received. + source_database (str): + Name of the database the backup was created + from. + """ + + backup = proto.Field( + proto.STRING, + number=1, + ) + version_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + create_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + source_database = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateBackupEncryptionConfig(proto.Message): + r"""Encryption configuration for the backup to create. 
+ + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig.EncryptionType): + Required. The encryption type of the backup. + kms_key_name (str): + Optional. The Cloud KMS key that will be used to protect the + backup. This field should be set only when + [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] + is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + """ + class EncryptionType(proto.Enum): + r"""Encryption types for the backup.""" + ENCRYPTION_TYPE_UNSPECIFIED = 0 + USE_DATABASE_ENCRYPTION = 1 + GOOGLE_DEFAULT_ENCRYPTION = 2 + CUSTOMER_MANAGED_ENCRYPTION = 3 + + encryption_type = proto.Field( + proto.ENUM, + number=1, + enum=EncryptionType, + ) + kms_key_name = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py new file mode 100644 index 0000000000..e1f6168dae --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'OperationProgress', + 'EncryptionConfig', + 'EncryptionInfo', + }, +) + + +class OperationProgress(proto.Message): + r"""Encapsulates progress related information for a Cloud Spanner + long running operation. + + Attributes: + progress_percent (int): + Percent completion of the operation. + Values are between 0 and 100 inclusive. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time the request was received. + end_time (google.protobuf.timestamp_pb2.Timestamp): + If set, the time at which this operation + failed or was completed successfully. + """ + + progress_percent = proto.Field( + proto.INT32, + number=1, + ) + start_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class EncryptionConfig(proto.Message): + r"""Encryption configuration for a Cloud Spanner database. + + Attributes: + kms_key_name (str): + The Cloud KMS key to be used for encrypting and decrypting + the database. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + """ + + kms_key_name = proto.Field( + proto.STRING, + number=2, + ) + + +class EncryptionInfo(proto.Message): + r"""Encryption information for a Cloud Spanner database or + backup. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.EncryptionInfo.Type): + Output only. The type of encryption. + encryption_status (google.rpc.status_pb2.Status): + Output only. If present, the status of a + recent encrypt/decrypt call on underlying data + for this database or backup. Regardless of + status, data is always encrypted at rest. + kms_key_version (str): + Output only. 
A Cloud KMS key version that is + being used to protect the database or backup. + """ + class Type(proto.Enum): + r"""Possible encryption types.""" + TYPE_UNSPECIFIED = 0 + GOOGLE_DEFAULT_ENCRYPTION = 1 + CUSTOMER_MANAGED_ENCRYPTION = 2 + + encryption_type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + encryption_status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + kms_key_version = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py new file mode 100644 index 0000000000..0da1393227 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py @@ -0,0 +1,826 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import proto  # type: ignore

from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup
from google.cloud.spanner_admin_database_v1.types import common
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore


# Registry of every message/enum defined in this module; proto-plus uses the
# manifest to wire the generated types into the package namespace.
__protobuf__ = proto.module(
    package='google.spanner.admin.database.v1',
    manifest={
        'RestoreSourceType',
        'RestoreInfo',
        'Database',
        'ListDatabasesRequest',
        'ListDatabasesResponse',
        'CreateDatabaseRequest',
        'CreateDatabaseMetadata',
        'GetDatabaseRequest',
        'UpdateDatabaseDdlRequest',
        'UpdateDatabaseDdlMetadata',
        'DropDatabaseRequest',
        'GetDatabaseDdlRequest',
        'GetDatabaseDdlResponse',
        'ListDatabaseOperationsRequest',
        'ListDatabaseOperationsResponse',
        'RestoreDatabaseRequest',
        'RestoreDatabaseEncryptionConfig',
        'RestoreDatabaseMetadata',
        'OptimizeRestoredDatabaseMetadata',
    },
)


class RestoreSourceType(proto.Enum):
    r"""Indicates the type of the restore source."""
    TYPE_UNSPECIFIED = 0
    BACKUP = 1


class RestoreInfo(proto.Message):
    r"""Information about the database restore.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType):
            The type of the restore source.
        backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo):
            Information about the backup used to restore
            the database. The backup may no longer exist.

            This field is a member of `oneof`_ ``source_info``.
    """

    source_type = proto.Field(
        proto.ENUM,
        number=1,
        enum='RestoreSourceType',
    )
    backup_info = proto.Field(
        proto.MESSAGE,
        number=2,
        oneof='source_info',
        message=gsad_backup.BackupInfo,
    )


class Database(proto.Message):
    r"""A Cloud Spanner database.

    Attributes:
        name (str):
            Required. The name of the database. Values are of the form
            ``projects/<project>/instances/<instance>/databases/<database>``,
            where ``<database>`` is as specified in the
            ``CREATE DATABASE`` statement. This name can be passed to
            other API methods to identify the database.
        state (google.cloud.spanner_admin_database_v1.types.Database.State):
            Output only. The current database state.
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. If exists, the time at which the
            database creation started.
        restore_info (google.cloud.spanner_admin_database_v1.types.RestoreInfo):
            Output only. Applicable only for restored
            databases. Contains information about the
            restore source.
        encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig):
            Output only. For databases that are using
            customer managed encryption, this field contains
            the encryption configuration for the database.
            For databases that are using Google default or
            other types of encryption, this field is empty.
        encryption_info (Sequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]):
            Output only. For databases that are using
            customer managed encryption, this field contains
            the encryption information for the database,
            such as encryption state and the Cloud KMS key
            versions that are in use.
            For databases that are using Google default or
            other types of encryption, this field is empty.

            This field is propagated lazily from the
            backend. There might be a delay from when a key
            version is being used and when it appears in
            this field.
        version_retention_period (str):
            Output only. The period in which Cloud Spanner retains all
            versions of data for the database. This is the same as the
            value of version_retention_period database option set using
            [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl].
            Defaults to 1 hour, if not set.
        earliest_version_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Earliest timestamp at which
            older versions of the data can be read. This
            value is continuously updated by Cloud Spanner
            and becomes stale the moment it is queried. If
            you are using this value to recover data, make
            sure to account for the time from the moment
            when the value is queried to the moment when you
            initiate the recovery.
        default_leader (str):
            Output only. The read-write region which contains the
            database's leader replicas.

            This is the same as the value of default_leader database
            option set using DatabaseAdmin.CreateDatabase or
            DatabaseAdmin.UpdateDatabaseDdl. If not explicitly set, this
            is empty.
    """
    class State(proto.Enum):
        r"""Indicates the current state of the database."""
        STATE_UNSPECIFIED = 0
        CREATING = 1
        READY = 2
        READY_OPTIMIZING = 3

    name = proto.Field(
        proto.STRING,
        number=1,
    )
    state = proto.Field(
        proto.ENUM,
        number=2,
        enum=State,
    )
    create_time = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    restore_info = proto.Field(
        proto.MESSAGE,
        number=4,
        message='RestoreInfo',
    )
    encryption_config = proto.Field(
        proto.MESSAGE,
        number=5,
        message=common.EncryptionConfig,
    )
    encryption_info = proto.RepeatedField(
        proto.MESSAGE,
        number=8,
        message=common.EncryptionInfo,
    )
    version_retention_period = proto.Field(
        proto.STRING,
        number=6,
    )
    earliest_version_time = proto.Field(
        proto.MESSAGE,
        number=7,
        message=timestamp_pb2.Timestamp,
    )
    default_leader = proto.Field(
        proto.STRING,
        number=9,
    )


class ListDatabasesRequest(proto.Message):
    r"""The request for
    [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases].

    Attributes:
        parent (str):
            Required. The instance whose databases should be listed.
            Values are of the form
            ``projects/<project>/instances/<instance>``.
        page_size (int):
            Number of databases to be returned in the
            response. If 0 or less, defaults to the server's
            maximum allowed page size.
        page_token (str):
            If non-empty, ``page_token`` should contain a
            [next_page_token][google.spanner.admin.database.v1.ListDatabasesResponse.next_page_token]
            from a previous
            [ListDatabasesResponse][google.spanner.admin.database.v1.ListDatabasesResponse].
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size = proto.Field(
        proto.INT32,
        number=3,
    )
    page_token = proto.Field(
        proto.STRING,
        number=4,
    )


class ListDatabasesResponse(proto.Message):
    r"""The response for
    [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases].

    Attributes:
        databases (Sequence[google.cloud.spanner_admin_database_v1.types.Database]):
            Databases that matched the request.
        next_page_token (str):
            ``next_page_token`` can be sent in a subsequent
            [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]
            call to fetch more of the matching databases.
    """

    @property
    def raw_page(self):
        return self

    databases = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message='Database',
    )
    next_page_token = proto.Field(
        proto.STRING,
        number=2,
    )


class CreateDatabaseRequest(proto.Message):
    r"""The request for
    [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase].

    Attributes:
        parent (str):
            Required. The name of the instance that will serve the new
            database. Values are of the form
            ``projects/<project>/instances/<instance>``.
        create_statement (str):
            Required. A ``CREATE DATABASE`` statement, which specifies
            the ID of the new database. The database ID must conform to
            the regular expression ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be
            between 2 and 30 characters in length. If the database ID is
            a reserved word or if it contains a hyphen, the database ID
            must be enclosed in backticks (:literal:`\``).
        extra_statements (Sequence[str]):
            Optional. A list of DDL statements to run
            inside the newly created database. Statements
            can create tables, indexes, etc. These
            statements execute atomically with the creation
            of the database: if there is an error in any
            statement, the database is not created.
        encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig):
            Optional. The encryption configuration for
            the database. If this field is not specified,
            Cloud Spanner will encrypt/decrypt all data at
            rest using Google default encryption.
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    create_statement = proto.Field(
        proto.STRING,
        number=2,
    )
    extra_statements = proto.RepeatedField(
        proto.STRING,
        number=3,
    )
    encryption_config = proto.Field(
        proto.MESSAGE,
        number=4,
        message=common.EncryptionConfig,
    )


class CreateDatabaseMetadata(proto.Message):
    r"""Metadata type for the operation returned by
    [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase].

    Attributes:
        database (str):
            The database being created.
    """

    database = proto.Field(
        proto.STRING,
        number=1,
    )


class GetDatabaseRequest(proto.Message):
    r"""The request for
    [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase].

    Attributes:
        name (str):
            Required. The name of the requested database. Values are of
            the form
            ``projects/<project>/instances/<instance>/databases/<database>``.
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )


class UpdateDatabaseDdlRequest(proto.Message):
    r"""Enqueues the given DDL statements to be applied, in order but not
    necessarily all at once, to the database schema at some point (or
    points) in the future. The server checks that the statements are
    executable (syntactically valid, name tables that exist, etc.)
    before enqueueing them, but they may still fail upon later execution
    (e.g., if a statement from another batch of statements is applied
    first and it conflicts in some way, or if there is some data-related
    problem like a ``NULL`` value in a column to which ``NOT NULL``
    would be added). If a statement fails, all subsequent statements in
    the batch are automatically cancelled.

    Each batch of statements is assigned a name which can be used with
    the [Operations][google.longrunning.Operations] API to monitor
    progress. See the
    [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id]
    field for more details.

    Attributes:
        database (str):
            Required. The database to update.
        statements (Sequence[str]):
            Required. DDL statements to be applied to the
            database.
        operation_id (str):
            If empty, the new update request is assigned an
            automatically-generated operation ID. Otherwise,
            ``operation_id`` is used to construct the name of the
            resulting [Operation][google.longrunning.Operation].

            Specifying an explicit operation ID simplifies determining
            whether the statements were executed in the event that the
            [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]
            call is replayed, or the return value is otherwise lost: the
            [database][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.database]
            and ``operation_id`` fields can be combined to form the
            [name][google.longrunning.Operation.name] of the resulting
            [longrunning.Operation][google.longrunning.Operation]:
            ``<database>/operations/<operation_id>``.

            ``operation_id`` should be unique within the database, and
            must be a valid identifier: ``[a-z][a-z0-9_]*``. Note that
            automatically-generated operation IDs always begin with an
            underscore. If the named operation already exists,
            [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]
            returns ``ALREADY_EXISTS``.
    """

    database = proto.Field(
        proto.STRING,
        number=1,
    )
    statements = proto.RepeatedField(
        proto.STRING,
        number=2,
    )
    operation_id = proto.Field(
        proto.STRING,
        number=3,
    )


class UpdateDatabaseDdlMetadata(proto.Message):
    r"""Metadata type for the operation returned by
    [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl].

    Attributes:
        database (str):
            The database being modified.
        statements (Sequence[str]):
            For an update this list contains all the
            statements. For an individual statement, this
            list contains only that statement.
        commit_timestamps (Sequence[google.protobuf.timestamp_pb2.Timestamp]):
            Reports the commit timestamps of all statements that have
            succeeded so far, where ``commit_timestamps[i]`` is the
            commit timestamp for the statement ``statements[i]``.
        throttled (bool):
            Output only. When true, indicates that the
            operation is throttled e.g due to resource
            constraints. When resources become available the
            operation will resume and this field will be
            false again.
        progress (Sequence[google.cloud.spanner_admin_database_v1.types.OperationProgress]):
            The progress of the
            [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]
            operations. Currently, only index creation statements will
            have a continuously updating progress. For non-index
            creation statements, ``progress[i]`` will have start time
            and end time populated with commit timestamp of operation,
            as well as a progress of 100% once the operation has
            completed. ``progress[i]`` is the operation progress for
            ``statements[i]``.
    """

    database = proto.Field(
        proto.STRING,
        number=1,
    )
    statements = proto.RepeatedField(
        proto.STRING,
        number=2,
    )
    commit_timestamps = proto.RepeatedField(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    throttled = proto.Field(
        proto.BOOL,
        number=4,
    )
    progress = proto.RepeatedField(
        proto.MESSAGE,
        number=5,
        message=common.OperationProgress,
    )


class DropDatabaseRequest(proto.Message):
    r"""The request for
    [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase].

    Attributes:
        database (str):
            Required. The database to be dropped.
    """

    database = proto.Field(
        proto.STRING,
        number=1,
    )


class GetDatabaseDdlRequest(proto.Message):
    r"""The request for
    [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl].

    Attributes:
        database (str):
            Required. The database whose schema we wish to get. Values
            are of the form
            ``projects/<project>/instances/<instance>/databases/<database>``
    """

    database = proto.Field(
        proto.STRING,
        number=1,
    )


class GetDatabaseDdlResponse(proto.Message):
    r"""The response for
    [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl].

    Attributes:
        statements (Sequence[str]):
            A list of formatted DDL statements defining
            the schema of the database specified in the
            request.
    """

    statements = proto.RepeatedField(
        proto.STRING,
        number=1,
    )


class ListDatabaseOperationsRequest(proto.Message):
    r"""The request for
    [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations].

    Attributes:
        parent (str):
            Required. The instance of the database operations. Values
            are of the form ``projects/<project>/instances/<instance>``.
        filter (str):
            An expression that filters the list of returned operations.

            A filter expression consists of a field name, a comparison
            operator, and a value for filtering. The value must be a
            string, a number, or a boolean. The comparison operator must
            be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or
            ``:``. Colon ``:`` is the contains operator. Filter rules
            are not case sensitive.

            The following fields in the
            [Operation][google.longrunning.Operation] are eligible for
            filtering:

            -  ``name`` - The name of the long-running operation
            -  ``done`` - False if the operation is in progress, else
               true.
            -  ``metadata.@type`` - the type of metadata. For example,
               the type string for
               [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]
               is
               ``type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata``.
            -  ``metadata.<field_name>`` - any field in metadata.value.
            -  ``error`` - Error associated with the long-running
               operation.
            -  ``response.@type`` - the type of response.
            -  ``response.<field_name>`` - any field in response.value.

            You can combine multiple expressions by enclosing each
            expression in parentheses. By default, expressions are
            combined with AND logic. However, you can specify AND, OR,
            and NOT logic explicitly.

            Here are a few examples:

            -  ``done:true`` - The operation is complete.
            -  ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND``
               ``(metadata.source_type:BACKUP) AND``
               ``(metadata.backup_info.backup:backup_howl) AND``
               ``(metadata.name:restored_howl) AND``
               ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND``
               ``(error:*)`` - Return operations where:

               -  The operation's metadata type is
                  [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata].
               -  The database is restored from a backup.
               -  The backup name contains "backup_howl".
               -  The restored database's name contains "restored_howl".
               -  The operation started before 2018-03-28T14:50:00Z.
               -  The operation resulted in an error.
        page_size (int):
            Number of operations to be returned in the
            response. If 0 or less, defaults to the server's
            maximum allowed page size.
        page_token (str):
            If non-empty, ``page_token`` should contain a
            [next_page_token][google.spanner.admin.database.v1.ListDatabaseOperationsResponse.next_page_token]
            from a previous
            [ListDatabaseOperationsResponse][google.spanner.admin.database.v1.ListDatabaseOperationsResponse]
            to the same ``parent`` and with the same ``filter``.
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    filter = proto.Field(
        proto.STRING,
        number=2,
    )
    page_size = proto.Field(
        proto.INT32,
        number=3,
    )
    page_token = proto.Field(
        proto.STRING,
        number=4,
    )


class ListDatabaseOperationsResponse(proto.Message):
    r"""The response for
    [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations].

    Attributes:
        operations (Sequence[google.longrunning.operations_pb2.Operation]):
            The list of matching database [long-running
            operations][google.longrunning.Operation]. Each operation's
            name will be prefixed by the database's name. The
            operation's
            [metadata][google.longrunning.Operation.metadata] field type
            ``metadata.type_url`` describes the type of the metadata.
        next_page_token (str):
            ``next_page_token`` can be sent in a subsequent
            [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]
            call to fetch more of the matching metadata.
    """

    @property
    def raw_page(self):
        return self

    operations = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=operations_pb2.Operation,
    )
    next_page_token = proto.Field(
        proto.STRING,
        number=2,
    )


class RestoreDatabaseRequest(proto.Message):
    r"""The request for
    [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase].


    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        parent (str):
            Required. The name of the instance in which to create the
            restored database. This instance must be in the same project
            and have the same instance configuration as the instance
            containing the source backup. Values are of the form
            ``projects/<project>/instances/<instance>``.
        database_id (str):
            Required. The id of the database to create and restore to.
            This database must not already exist. The ``database_id``
            appended to ``parent`` forms the full database name of the
            form
            ``projects/<project>/instances/<instance>/databases/<database_id>``.
        backup (str):
            Name of the backup from which to restore. Values are of the
            form
            ``projects/<project>/instances/<instance>/backups/<backup>``.

            This field is a member of `oneof`_ ``source``.
        encryption_config (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig):
            Optional. An encryption configuration describing the
            encryption type and key resources in Cloud KMS used to
            encrypt/decrypt the database to restore to. If this field is
            not specified, the restored database will use the same
            encryption configuration as the backup by default, namely
            [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type]
            = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``.
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    database_id = proto.Field(
        proto.STRING,
        number=2,
    )
    backup = proto.Field(
        proto.STRING,
        number=3,
        oneof='source',
    )
    encryption_config = proto.Field(
        proto.MESSAGE,
        number=4,
        message='RestoreDatabaseEncryptionConfig',
    )


class RestoreDatabaseEncryptionConfig(proto.Message):
    r"""Encryption configuration for the restored database.

    Attributes:
        encryption_type (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig.EncryptionType):
            Required. The encryption type of the restored
            database.
        kms_key_name (str):
            Optional. The Cloud KMS key that will be used to
            encrypt/decrypt the restored database. This field should be
            set only when
            [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type]
            is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form
            ``projects/<project>/locations/<location>/keyRings/<key_ring>/cryptoKeys/<kms_key_name>``.
    """
    class EncryptionType(proto.Enum):
        r"""Encryption types for the database to be restored."""
        ENCRYPTION_TYPE_UNSPECIFIED = 0
        USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1
        GOOGLE_DEFAULT_ENCRYPTION = 2
        CUSTOMER_MANAGED_ENCRYPTION = 3

    encryption_type = proto.Field(
        proto.ENUM,
        number=1,
        enum=EncryptionType,
    )
    kms_key_name = proto.Field(
        proto.STRING,
        number=2,
    )


class RestoreDatabaseMetadata(proto.Message):
    r"""Metadata type for the long-running operation returned by
    [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase].


    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        name (str):
            Name of the database being created and
            restored to.
        source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType):
            The type of the restore source.
        backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo):
            Information about the backup used to restore
            the database.

            This field is a member of `oneof`_ ``source_info``.
        progress (google.cloud.spanner_admin_database_v1.types.OperationProgress):
            The progress of the
            [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]
            operation.
        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
            The time at which cancellation of this operation was
            received.
            [Operations.CancelOperation][google.longrunning.Operations.CancelOperation]
            starts asynchronous cancellation on a long-running
            operation. The server makes a best effort to cancel the
            operation, but success is not guaranteed. Clients can use
            [Operations.GetOperation][google.longrunning.Operations.GetOperation]
            or other methods to check whether the cancellation succeeded
            or whether the operation completed despite cancellation. On
            successful cancellation, the operation is not deleted;
            instead, it becomes an operation with an
            [Operation.error][google.longrunning.Operation.error] value
            with a [google.rpc.Status.code][google.rpc.Status.code] of
            1, corresponding to ``Code.CANCELLED``.
        optimize_database_operation_name (str):
            If exists, the name of the long-running operation that will
            be used to track the post-restore optimization process to
            optimize the performance of the restored database, and
            remove the dependency on the restore source. The name is of
            the form
            ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``
            where the ``<database>`` is the name of database being created and
            restored to. The metadata type of the long-running operation is
            [OptimizeRestoredDatabaseMetadata][google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata].
            This long-running operation will be automatically created by
            the system after the RestoreDatabase long-running operation
            completes successfully. This operation will not be created
            if the restore was not successful.
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )
    source_type = proto.Field(
        proto.ENUM,
        number=2,
        enum='RestoreSourceType',
    )
    backup_info = proto.Field(
        proto.MESSAGE,
        number=3,
        oneof='source_info',
        message=gsad_backup.BackupInfo,
    )
    progress = proto.Field(
        proto.MESSAGE,
        number=4,
        message=common.OperationProgress,
    )
    cancel_time = proto.Field(
        proto.MESSAGE,
        number=5,
        message=timestamp_pb2.Timestamp,
    )
    optimize_database_operation_name = proto.Field(
        proto.STRING,
        number=6,
    )


class OptimizeRestoredDatabaseMetadata(proto.Message):
    r"""Metadata type for the long-running operation used to track
    the progress of optimizations performed on a newly restored
    database. This long-running operation is automatically created
    by the system after the successful completion of a database
    restore, and cannot be cancelled.

    Attributes:
        name (str):
            Name of the restored database being
            optimized.
        progress (google.cloud.spanner_admin_database_v1.types.OperationProgress):
            The progress of the post-restore
            optimizations.
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )
    progress = proto.Field(
        proto.MESSAGE,
        number=2,
        message=common.OperationProgress,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/spanner_admin_database/v1/mypy.ini b/owl-bot-staging/spanner_admin_database/v1/mypy.ini
new file mode 100644
index 0000000000..4505b48543
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/mypy.ini
@@ -0,0 +1,3 @@
[mypy]
python_version = 3.6
namespace_packages = True
diff --git a/owl-bot-staging/spanner_admin_database/v1/noxfile.py b/owl-bot-staging/spanner_admin_database/v1/noxfile.py
new file mode 100644
index 0000000000..0106b9f16a
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/noxfile.py
@@ -0,0 +1,132 @@
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", +] + +@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/spanner_admin_database_v1/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python='3.9') +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +def mypy(session): + """Run the type checker.""" + session.install('mypy', 'types-pkg_resources') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.9') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py new file mode 100644 index 0000000000..cc4c78d884 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py @@ 
-0,0 +1,192 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class spanner_admin_databaseCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ), + 'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', ), + 'delete_backup': ('name', ), + 'drop_database': ('database', ), + 'get_backup': ('name', ), + 'get_database': ('name', ), + 'get_database_ddl': ('database', ), + 'get_iam_policy': ('resource', 'options', ), + 'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_backups': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_databases': ('parent', 'page_size', 'page_token', ), + 'restore_database': ('parent', 'database_id', 
'backup', 'encryption_config', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_backup': ('backup', 'update_mask', ), + 'update_database_ddl': ('database', 'statements', 'operation_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=spanner_admin_databaseCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the spanner_admin_database client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_database/v1/setup.py b/owl-bot-staging/spanner_admin_database/v1/setup.py new file mode 100644 index 0000000000..3ad2096c24 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/setup.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-spanner-admin-database', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.19.4', + 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', + ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py new file mode 100644 index 0000000000..50ad1d4b84 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -0,0 +1,5880 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminAsyncClient +from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminClient +from google.cloud.spanner_admin_database_v1.services.database_admin import pagers +from google.cloud.spanner_admin_database_v1.services.database_admin import transports +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import common +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore 
+from google.type import expr_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DatabaseAdminClient._get_default_mtls_endpoint(None) is None + assert DatabaseAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + DatabaseAdminClient, + DatabaseAdminAsyncClient, +]) +def test_database_admin_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'spanner.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ 
+ (transports.DatabaseAdminGrpcTransport, "grpc"), + (transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_database_admin_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + DatabaseAdminClient, + DatabaseAdminAsyncClient, +]) +def test_database_admin_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'spanner.googleapis.com:443' + + +def test_database_admin_client_get_transport_class(): + transport = DatabaseAdminClient.get_transport_class() + available_transports = [ + transports.DatabaseAdminGrpcTransport, + ] + assert transport in available_transports + + transport = DatabaseAdminClient.get_transport_class("grpc") + assert transport == transports.DatabaseAdminGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DatabaseAdminClient, 
transports.DatabaseAdminGrpcTransport, "grpc"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DatabaseAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminAsyncClient)) +def test_database_admin_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "true"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "false"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DatabaseAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_database_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_database_admin_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_database_admin_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_database_admin_client_client_options_from_dict(): + with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DatabaseAdminClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_databases(transport: str = 'grpc', request_type=spanner_database_admin.ListDatabasesRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_databases_from_dict(): + test_list_databases(request_type=dict) + + +def test_list_databases_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + client.list_databases() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabasesRequest() + + +@pytest.mark.asyncio +async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabasesRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabasesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + + +def test_list_databases_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabasesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabasesResponse() + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_databases_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabasesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse()) + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_databases_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabasesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_databases( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_databases_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + spanner_database_admin.ListDatabasesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_databases_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabasesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_databases( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_databases_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_databases( + spanner_database_admin.ListDatabasesRequest(), + parent='parent_value', + ) + + +def test_list_databases_pager(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_databases(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, spanner_database_admin.Database) + for i in results) + +def test_list_databases_pages(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + pages = list(client.list_databases(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_databases_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_databases(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_database_admin.Database) + for i in responses) + +@pytest.mark.asyncio +async def test_list_databases_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_databases(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_create_database(transport: str = 'grpc', request_type=spanner_database_admin.CreateDatabaseRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.CreateDatabaseRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_database_from_dict(): + test_create_database(request_type=dict) + + +def test_create_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + client.create_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.CreateDatabaseRequest() + + +@pytest.mark.asyncio +async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.CreateDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.CreateDatabaseRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_database_async_from_dict(): + await test_create_database_async(request_type=dict) + + +def test_create_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.CreateDatabaseRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.CreateDatabaseRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_database( + parent='parent_value', + create_statement='create_statement_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].create_statement == 'create_statement_value' + + +def test_create_database_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + spanner_database_admin.CreateDatabaseRequest(), + parent='parent_value', + create_statement='create_statement_value', + ) + + +@pytest.mark.asyncio +async def test_create_database_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_database( + parent='parent_value', + create_statement='create_statement_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].create_statement == 'create_statement_value' + + +@pytest.mark.asyncio +async def test_create_database_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_database( + spanner_database_admin.CreateDatabaseRequest(), + parent='parent_value', + create_statement='create_statement_value', + ) + + +def test_get_database(transport: str = 'grpc', request_type=spanner_database_admin.GetDatabaseRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = spanner_database_admin.Database( + name='name_value', + state=spanner_database_admin.Database.State.CREATING, + version_retention_period='version_retention_period_value', + default_leader='default_leader_value', + ) + response = client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.Database) + assert response.name == 'name_value' + assert response.state == spanner_database_admin.Database.State.CREATING + assert response.version_retention_period == 'version_retention_period_value' + assert response.default_leader == 'default_leader_value' + + +def test_get_database_from_dict(): + test_get_database(request_type=dict) + + +def test_get_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + client.get_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseRequest() + + +@pytest.mark.asyncio +async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database( + name='name_value', + state=spanner_database_admin.Database.State.CREATING, + version_retention_period='version_retention_period_value', + default_leader='default_leader_value', + )) + response = await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.Database) + assert response.name == 'name_value' + assert response.state == spanner_database_admin.Database.State.CREATING + assert response.version_retention_period == 'version_retention_period_value' + assert response.default_leader == 'default_leader_value' + + +@pytest.mark.asyncio +async def test_get_database_async_from_dict(): + await test_get_database_async(request_type=dict) + + +def test_get_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.GetDatabaseRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = spanner_database_admin.Database() + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.GetDatabaseRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) + await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_database( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_database_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_database( + spanner_database_admin.GetDatabaseRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_database_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_database( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_database_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_database( + spanner_database_admin.GetDatabaseRequest(), + name='name_value', + ) + + +def test_update_database_ddl(transport: str = 'grpc', request_type=spanner_database_admin.UpdateDatabaseDdlRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_database_ddl_from_dict(): + test_update_database_ddl(request_type=dict) + + +def test_update_database_ddl_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + client.update_database_ddl() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest() + + +@pytest.mark.asyncio +async def test_update_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseDdlRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_database_ddl_async_from_dict(): + await test_update_database_ddl_async(request_type=dict) + + +def test_update_database_ddl_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseDdlRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_database_ddl_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseDdlRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +def test_update_database_ddl_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_database_ddl( + database='database_value', + statements=['statements_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + assert args[0].statements == ['statements_value'] + + +def test_update_database_ddl_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database_ddl( + spanner_database_admin.UpdateDatabaseDdlRequest(), + database='database_value', + statements=['statements_value'], + ) + + +@pytest.mark.asyncio +async def test_update_database_ddl_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_database_ddl( + database='database_value', + statements=['statements_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + assert args[0].statements == ['statements_value'] + + +@pytest.mark.asyncio +async def test_update_database_ddl_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_database_ddl( + spanner_database_admin.UpdateDatabaseDdlRequest(), + database='database_value', + statements=['statements_value'], + ) + + +def test_drop_database(transport: str = 'grpc', request_type=spanner_database_admin.DropDatabaseRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.DropDatabaseRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_drop_database_from_dict(): + test_drop_database(request_type=dict) + + +def test_drop_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + client.drop_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.DropDatabaseRequest() + + +@pytest.mark.asyncio +async def test_drop_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.DropDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.DropDatabaseRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_drop_database_async_from_dict(): + await test_drop_database_async(request_type=dict) + + +def test_drop_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = spanner_database_admin.DropDatabaseRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + call.return_value = None + client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_drop_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.DropDatabaseRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +def test_drop_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.drop_database( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + + +def test_drop_database_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.drop_database( + spanner_database_admin.DropDatabaseRequest(), + database='database_value', + ) + + +@pytest.mark.asyncio +async def test_drop_database_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.drop_database( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + + +@pytest.mark.asyncio +async def test_drop_database_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.drop_database( + spanner_database_admin.DropDatabaseRequest(), + database='database_value', + ) + + +def test_get_database_ddl(transport: str = 'grpc', request_type=spanner_database_admin.GetDatabaseDdlRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.GetDatabaseDdlResponse( + statements=['statements_value'], + ) + response = client.get_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseDdlRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) + assert response.statements == ['statements_value'] + + +def test_get_database_ddl_from_dict(): + test_get_database_ddl(request_type=dict) + + +def test_get_database_ddl_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + client.get_database_ddl() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseDdlRequest() + + +@pytest.mark.asyncio +async def test_get_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseDdlRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse( + statements=['statements_value'], + )) + response = await client.get_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseDdlRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) + assert response.statements == ['statements_value'] + + +@pytest.mark.asyncio +async def test_get_database_ddl_async_from_dict(): + await test_get_database_ddl_async(request_type=dict) + + +def test_get_database_ddl_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = spanner_database_admin.GetDatabaseDdlRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + call.return_value = spanner_database_admin.GetDatabaseDdlResponse() + client.get_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_database_ddl_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.GetDatabaseDdlRequest() + + request.database = 'database/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse()) + await client.get_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database/value', + ) in kw['metadata'] + + +def test_get_database_ddl_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.GetDatabaseDdlResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_database_ddl( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + + +def test_get_database_ddl_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_database_ddl( + spanner_database_admin.GetDatabaseDdlRequest(), + database='database_value', + ) + + +@pytest.mark.asyncio +async def test_get_database_ddl_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.GetDatabaseDdlResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_database_ddl( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].database == 'database_value' + + +@pytest.mark.asyncio +async def test_get_database_ddl_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_database_ddl( + spanner_database_admin.GetDatabaseDdlRequest(), + database='database_value', + ) + + +def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +def test_set_iam_policy_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_from_dict(): + test_get_iam_policy(request_type=dict) + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_get_iam_policy_from_dict_foreign(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy(request={ + 'resource': 'resource_value', + 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +def test_get_iam_policy_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource='resource_value', + permissions=['permissions_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + assert args[0].permissions == ['permissions_value'] + + +def test_test_iam_permissions_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.test_iam_permissions( + resource='resource_value', + permissions=['permissions_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + assert args[0].permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +def test_create_backup(transport: str = 'grpc', request_type=gsad_backup.CreateBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup.CreateBackupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_backup_from_dict(): + test_create_backup(request_type=dict) + + +def test_create_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + client.create_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup.CreateBackupRequest() + + +@pytest.mark.asyncio +async def test_create_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.CreateBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup.CreateBackupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) + + +def test_create_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.CreateBackupRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.CreateBackupRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup( + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].backup == gsad_backup.Backup(database='database_value') + assert args[0].backup_id == 'backup_id_value' + + +def test_create_backup_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup( + gsad_backup.CreateBackupRequest(), + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + + +@pytest.mark.asyncio +async def test_create_backup_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup( + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].backup == gsad_backup.Backup(database='database_value') + assert args[0].backup_id == 'backup_id_value' + + +@pytest.mark.asyncio +async def test_create_backup_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup( + gsad_backup.CreateBackupRequest(), + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + + +def test_get_backup(transport: str = 'grpc', request_type=backup.GetBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + state=backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup.GetBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + + +def test_get_backup_from_dict(): + test_get_backup(request_type=dict) + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=backup.GetBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + state=backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + )) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup.GetBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.GetBackupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = backup.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.GetBackupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_backup_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + backup.GetBackupRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_backup_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.Backup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_backup_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup( + backup.GetBackupRequest(), + name='name_value', + ) + + +def test_update_backup(transport: str = 'grpc', request_type=gsad_backup.UpdateBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + state=gsad_backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + ) + response = client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup.UpdateBackupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + + +def test_update_backup_from_dict(): + test_update_backup(request_type=dict) + + +def test_update_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.UpdateBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + state=gsad_backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + )) + response = await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup.UpdateBackupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsad_backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + + +def test_update_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.UpdateBackupRequest() + + request.backup.name = 'backup.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value = gsad_backup.Backup() + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup.name=backup.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.UpdateBackupRequest() + + request.backup.name = 'backup.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup.name=backup.name/value', + ) in kw['metadata'] + + +def test_update_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].backup == gsad_backup.Backup(database='database_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +def test_update_backup_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backup( + gsad_backup.UpdateBackupRequest(), + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.asyncio +async def test_update_backup_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup.Backup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_backup( + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].backup == gsad_backup.Backup(database='database_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_backup_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_backup( + gsad_backup.UpdateBackupRequest(), + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_backup(transport: str = 'grpc', request_type=backup.DeleteBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup.DeleteBackupRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_backup_from_dict(): + test_delete_backup(request_type=dict) + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.DeleteBackupRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=backup.DeleteBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup.DeleteBackupRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.DeleteBackupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = None + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.DeleteBackupRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_backup_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backup.DeleteBackupRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_backup( + backup.DeleteBackupRequest(), + name='name_value', + ) + + +def test_list_backups(transport: str = 'grpc', request_type=backup.ListBackupsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_backups_from_dict(): + test_list_backups(request_type=dict) + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupsRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.ListBackupsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + call.return_value = backup.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.ListBackupsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse()) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_backups_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_backups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_backups_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backup.ListBackupsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_backups( + backup.ListBackupsRequest(), + parent='parent_value', + ) + + +def test_list_backups_pager(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_backups(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, backup.Backup) + for i in results) + +def test_list_backups_pages(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup.Backup) + for i in responses) + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_backups(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_restore_database(transport: str = 'grpc', request_type=spanner_database_admin.RestoreDatabaseRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.RestoreDatabaseRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_restore_database_from_dict(): + test_restore_database(request_type=dict) + + +def test_restore_database_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + client.restore_database() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.RestoreDatabaseRequest() + + +@pytest.mark.asyncio +async def test_restore_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.RestoreDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.RestoreDatabaseRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_database_async_from_dict(): + await test_restore_database_async(request_type=dict) + + +def test_restore_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.RestoreDatabaseRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_restore_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.RestoreDatabaseRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_restore_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restore_database( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].database_id == 'database_id_value' + assert args[0].backup == 'backup_value' + + +def test_restore_database_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_database( + spanner_database_admin.RestoreDatabaseRequest(), + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + +@pytest.mark.asyncio +async def test_restore_database_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_database( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].database_id == 'database_id_value' + assert args[0].backup == 'backup_value' + + +@pytest.mark.asyncio +async def test_restore_database_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_database( + spanner_database_admin.RestoreDatabaseRequest(), + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + +def test_list_database_operations(transport: str = 'grpc', request_type=spanner_database_admin.ListDatabaseOperationsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabaseOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_database_operations_from_dict(): + test_list_database_operations(request_type=dict) + + +def test_list_database_operations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + client.list_database_operations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest() + + +@pytest.mark.asyncio +async def test_list_database_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseOperationsRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabaseOperationsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_database_operations_async_from_dict(): + await test_list_database_operations_async(request_type=dict) + + +def test_list_database_operations_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabaseOperationsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_database_operations_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabaseOperationsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) + await client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_database_operations_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_database_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_database_operations_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_database_operations( + spanner_database_admin.ListDatabaseOperationsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_database_operations_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_database_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_database_operations_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_database_operations( + spanner_database_admin.ListDatabaseOperationsRequest(), + parent='parent_value', + ) + + +def test_list_database_operations_pager(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_database_operations(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in results) + +def test_list_database_operations_pages(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_database_operations(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_database_operations_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_database_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + +@pytest.mark.asyncio +async def test_list_database_operations_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_database_operations(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_list_backup_operations(transport: str = 'grpc', request_type=backup.ListBackupOperationsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupOperationsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_backup_operations_from_dict(): + test_list_backup_operations(request_type=dict) + + +def test_list_backup_operations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + client.list_backup_operations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupOperationsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_operations_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupOperationsRequest): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupOperationsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupOperationsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_backup_operations_async_from_dict(): + await test_list_backup_operations_async(request_type=dict) + + +def test_list_backup_operations_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.ListBackupOperationsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + call.return_value = backup.ListBackupOperationsResponse() + client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_backup_operations_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.ListBackupOperationsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse()) + await client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_backup_operations_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupOperationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_backup_operations_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_operations( + backup.ListBackupOperationsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_backup_operations_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ # Wrap the response in a FakeUnaryUnaryCall so the async client can await it. + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_backup_operations_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_operations( + backup.ListBackupOperationsRequest(), + parent='parent_value', + ) + + +def test_list_backup_operations_pager(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_backup_operations(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in results) + +def test_list_backup_operations_pages(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_operations(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_backup_operations_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + +@pytest.mark.asyncio +async def test_list_backup_operations_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_backup_operations(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatabaseAdminClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatabaseAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DatabaseAdminClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatabaseAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DatabaseAdminGrpcTransport, + transports.DatabaseAdminGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DatabaseAdminGrpcTransport, + ) + +def test_database_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DatabaseAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_database_admin_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DatabaseAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_databases', + 'create_database', + 'get_database', + 'update_database_ddl', + 'drop_database', + 'get_database_ddl', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'create_backup', + 'get_backup', + 'update_backup', + 'delete_backup', + 'list_backups', + 'restore_database', + 'list_database_operations', + 'list_backup_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +def test_database_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DatabaseAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id="octopus", + ) + + +def test_database_admin_base_transport_with_adc(): + # Test the default 
credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DatabaseAdminTransport() + adc.assert_called_once() + + +def test_database_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DatabaseAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatabaseAdminGrpcTransport, + transports.DatabaseAdminGrpcAsyncIOTransport, + ], +) +def test_database_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DatabaseAdminGrpcTransport, grpc_helpers), + (transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_database_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + scopes=["1", "2"], + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) +def test_database_admin_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_database_admin_host_no_port(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), + ) + assert client.transport._host == 'spanner.googleapis.com:443' + + +def test_database_admin_host_with_port(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), + ) + assert client.transport._host == 'spanner.googleapis.com:8000' + +def test_database_admin_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DatabaseAdminGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_database_admin_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DatabaseAdminGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) +def test_database_admin_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + 
credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) +def test_database_admin_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_database_admin_grpc_lro_client(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_database_admin_grpc_lro_async_client(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_backup_path(): + project = "squid" + instance = "clam" + backup = "whelk" + expected = "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) + actual = DatabaseAdminClient.backup_path(project, instance, backup) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "octopus", + "instance": "oyster", + "backup": "nudibranch", + } + path = DatabaseAdminClient.backup_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_backup_path(path) + assert expected == actual + +def test_crypto_key_path(): + project = "cuttlefish" + location = "mussel" + key_ring = "winkle" + crypto_key = "nautilus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + actual = DatabaseAdminClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "scallop", + "location": "abalone", + "key_ring": "squid", + "crypto_key": "clam", + } + path = DatabaseAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_crypto_key_path(path) + assert expected == actual + +def test_crypto_key_version_path(): + project = "whelk" + location = "octopus" + key_ring = "oyster" + crypto_key = "nudibranch" + crypto_key_version = "cuttlefish" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) + actual = DatabaseAdminClient.crypto_key_version_path(project, location, key_ring, crypto_key, crypto_key_version) + assert expected == actual + + +def test_parse_crypto_key_version_path(): + expected = { + "project": "mussel", + "location": "winkle", + "key_ring": "nautilus", + "crypto_key": "scallop", + "crypto_key_version": "abalone", + } + path = DatabaseAdminClient.crypto_key_version_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_crypto_key_version_path(path) + assert expected == actual + +def test_database_path(): + project = "squid" + instance = "clam" + database = "whelk" + expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + actual = DatabaseAdminClient.database_path(project, instance, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "octopus", + "instance": "oyster", + "database": "nudibranch", + } + path = DatabaseAdminClient.database_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_database_path(path) + assert expected == actual + +def test_instance_path(): + project = "cuttlefish" + instance = "mussel" + expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) + actual = DatabaseAdminClient.instance_path(project, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "winkle", + "instance": "nautilus", + } + path = DatabaseAdminClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_instance_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DatabaseAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = DatabaseAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = DatabaseAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = DatabaseAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DatabaseAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = DatabaseAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = DatabaseAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = DatabaseAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DatabaseAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = DatabaseAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: + transport_class = DatabaseAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with 
mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc new file mode 100644 index 0000000000..31f0b429ca --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/spanner_admin_instance/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in new file mode 100644 index 0000000000..5af7430e8d --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/spanner_admin_instance *.py +recursive-include google/cloud/spanner_admin_instance_v1 *.py diff --git a/owl-bot-staging/spanner_admin_instance/v1/README.rst b/owl-bot-staging/spanner_admin_instance/v1/README.rst new file mode 100644 index 0000000000..94972f5b29 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Spanner Admin Instance API +========================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Spanner Admin Instance API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +..
code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py new file mode 100644 index 0000000000..658c054d8e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-spanner-admin-instance documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-spanner-admin-instance" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases.
+language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-spanner-admin-instance-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-spanner-admin-instance.tex", + u"google-cloud-spanner-admin-instance Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-spanner-admin-instance", + u"Google Cloud Spanner Admin Instance Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-spanner-admin-instance", + u"google-cloud-spanner-admin-instance Documentation", + author, + "google-cloud-spanner-admin-instance", + "GAPIC library for Google Cloud Spanner Admin Instance API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst 
b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst new file mode 100644 index 0000000000..bc65940f95 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + spanner_admin_instance_v1/services + spanner_admin_instance_v1/types diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst new file mode 100644 index 0000000000..fe820b3fad --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst @@ -0,0 +1,10 @@ +InstanceAdmin +------------------------------- + +.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin + :members: + :inherited-members: + +.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services.rst new file mode 100644 index 0000000000..407d44cc34 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner Admin Instance v1 API +======================================================= +.. toctree:: + :maxdepth: 2 + + instance_admin diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types.rst new file mode 100644 index 0000000000..8f7204ebce --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Spanner Admin Instance v1 API +==================================================== + +.. 
automodule:: google.cloud.spanner_admin_instance_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py new file mode 100644 index 0000000000..84caf4479c --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.spanner_admin_instance_v1.services.instance_admin.client import InstanceAdminClient +from google.cloud.spanner_admin_instance_v1.services.instance_admin.async_client import InstanceAdminAsyncClient + +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import Instance +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstanceConfig +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaInfo +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceRequest + +__all__ = ('InstanceAdminClient', + 'InstanceAdminAsyncClient', + 'CreateInstanceMetadata', + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'GetInstanceConfigRequest', + 'GetInstanceRequest', + 'Instance', + 'InstanceConfig', + 'ListInstanceConfigsRequest', + 'ListInstanceConfigsResponse', + 'ListInstancesRequest', + 
'ListInstancesResponse', + 'ReplicaInfo', + 'UpdateInstanceMetadata', + 'UpdateInstanceRequest', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed new file mode 100644 index 0000000000..915a8e55e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py new file mode 100644 index 0000000000..971e008623 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.instance_admin import InstanceAdminClient +from .services.instance_admin import InstanceAdminAsyncClient + +from .types.spanner_instance_admin import CreateInstanceMetadata +from .types.spanner_instance_admin import CreateInstanceRequest +from .types.spanner_instance_admin import DeleteInstanceRequest +from .types.spanner_instance_admin import GetInstanceConfigRequest +from .types.spanner_instance_admin import GetInstanceRequest +from .types.spanner_instance_admin import Instance +from .types.spanner_instance_admin import InstanceConfig +from .types.spanner_instance_admin import ListInstanceConfigsRequest +from .types.spanner_instance_admin import ListInstanceConfigsResponse +from .types.spanner_instance_admin import ListInstancesRequest +from .types.spanner_instance_admin import ListInstancesResponse +from .types.spanner_instance_admin import ReplicaInfo +from .types.spanner_instance_admin import UpdateInstanceMetadata +from .types.spanner_instance_admin import UpdateInstanceRequest + +__all__ = ( + 'InstanceAdminAsyncClient', +'CreateInstanceMetadata', +'CreateInstanceRequest', +'DeleteInstanceRequest', +'GetInstanceConfigRequest', +'GetInstanceRequest', +'Instance', +'InstanceAdminClient', +'InstanceConfig', +'ListInstanceConfigsRequest', +'ListInstanceConfigsResponse', +'ListInstancesRequest', +'ListInstancesResponse', +'ReplicaInfo', +'UpdateInstanceMetadata', +'UpdateInstanceRequest', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json new file mode 100644 index 0000000000..6fee5bcd53 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json @@ -0,0 +1,123 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": 
"google.cloud.spanner_admin_instance_v1", + "protoPackage": "google.spanner.admin.instance.v1", + "schema": "1.0", + "services": { + "InstanceAdmin": { + "clients": { + "grpc": { + "libraryClient": "InstanceAdminClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "GetInstanceConfig": { + "methods": [ + "get_instance_config" + ] + }, + "ListInstanceConfigs": { + "methods": [ + "list_instance_configs" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "grpc-async": { + "libraryClient": "InstanceAdminAsyncClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "GetInstanceConfig": { + "methods": [ + "get_instance_config" + ] + }, + "ListInstanceConfigs": { + "methods": [ + "list_instance_configs" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed new file mode 100644 index 0000000000..915a8e55e3 --- /dev/null +++ 
b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py new file mode 100644 index 0000000000..4de65971c2 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py new file mode 100644 index 0000000000..1e59529b7e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import InstanceAdminClient +from .async_client import InstanceAdminAsyncClient + +__all__ = ( + 'InstanceAdminClient', + 'InstanceAdminAsyncClient', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py new file mode 100644 index 0000000000..d252771f56 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -0,0 +1,1301 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +OptionalRetry = Union[retries.Retry, object] + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport +from .client import InstanceAdminClient + + +class InstanceAdminAsyncClient: + """Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). 
+ Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + """ + + _client: InstanceAdminClient + + DEFAULT_ENDPOINT = InstanceAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + + instance_path = staticmethod(InstanceAdminClient.instance_path) + parse_instance_path = staticmethod(InstanceAdminClient.parse_instance_path) + instance_config_path = staticmethod(InstanceAdminClient.instance_config_path) + parse_instance_config_path = staticmethod(InstanceAdminClient.parse_instance_config_path) + common_billing_account_path = staticmethod(InstanceAdminClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(InstanceAdminClient.parse_common_billing_account_path) + common_folder_path = staticmethod(InstanceAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(InstanceAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(InstanceAdminClient.common_organization_path) + parse_common_organization_path = staticmethod(InstanceAdminClient.parse_common_organization_path) + common_project_path = staticmethod(InstanceAdminClient.common_project_path) + parse_common_project_path = staticmethod(InstanceAdminClient.parse_common_project_path) + common_location_path = staticmethod(InstanceAdminClient.common_location_path) + parse_common_location_path = staticmethod(InstanceAdminClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceAdminAsyncClient: The constructed client. + """ + return InstanceAdminClient.from_service_account_info.__func__(InstanceAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceAdminAsyncClient: The constructed client. + """ + return InstanceAdminClient.from_service_account_file.__func__(InstanceAdminAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstanceAdminTransport: + """Returns the transport used by the client instance. + + Returns: + InstanceAdminTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(InstanceAdminClient).get_transport_class, type(InstanceAdminClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, InstanceAdminTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.InstanceAdminTransport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = InstanceAdminClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_instance_configs(self, + request: Union[spanner_instance_admin.ListInstanceConfigsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstanceConfigsAsyncPager: + r"""Lists the supported instance configurations for a + given project. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]): + The request object. The request for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + parent (:class:`str`): + Required. 
The name of the project for which a list of + supported instance configurations is requested. Values + are of the form ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager: + The response for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_instance_admin.ListInstanceConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instance_configs, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstanceConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance_config(self, + request: Union[spanner_instance_admin.GetInstanceConfigRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner_instance_admin.InstanceConfig: + r"""Gets information about a particular instance + configuration. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]): + The request object. The request for + [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. + name (:class:`str`): + Required. The name of the requested instance + configuration. Values are of the form + ``projects//instanceConfigs/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_instance_v1.types.InstanceConfig: + A possible configuration for a Cloud + Spanner instance. Configurations define + the geographic placement of nodes and + their replication. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_instance_admin.GetInstanceConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance_config, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_instances(self, + request: Union[spanner_instance_admin.ListInstancesRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists all instances in the given project. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]): + The request object. The request for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + parent (:class:`str`): + Required. The name of the project for which a list of + instances is requested. Values are of the form + ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager: + The response for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_instance_admin.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance(self, + request: Union[spanner_instance_admin.GetInstanceRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner_instance_admin.Instance: + r"""Gets information about a particular instance. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]): + The request object. The request for + [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. + name (:class:`str`): + Required. The name of the requested instance. Values are + of the form ``projects//instances/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_instance_v1.types.Instance: + An isolated set of Cloud Spanner + resources on which databases can be + hosted. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_instance_admin.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_instance(self, + request: Union[spanner_instance_admin.CreateInstanceRequest, dict] = None, + *, + parent: str = None, + instance_id: str = None, + instance: spanner_instance_admin.Instance = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an instance and begins preparing it to begin serving. + The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of preparing the new instance. The instance name is + assigned by the caller. If the named instance already exists, + ``CreateInstance`` returns ``ALREADY_EXISTS``. + + Immediately upon completion of this request: + + - The instance is readable via the API, with all requested + attributes but no allocated resources. Its state is + ``CREATING``. + + Until completion of the returned operation: + + - Cancelling the operation renders the instance immediately + unreadable via the API. + - The instance can be deleted. + - All other attempts to modify the instance are rejected. + + Upon completion of the returned operation: + + - Billing for all successfully-allocated resources begins (some + types may have lower than the requested levels). + - Databases can be created in the instance. + - The instance's allocated resource levels are readable via the + API. + - The instance's state becomes ``READY``. + + The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track creation of the instance. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Instance][google.spanner.admin.instance.v1.Instance], if + successful. 
+ + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]): + The request object. The request for + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + parent (:class:`str`): + Required. The name of the project in which to create the + instance. Values are of the form ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. The ID of the instance to create. Valid + identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` + and must be between 2 and 64 characters in length. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): + Required. The instance to create. The name may be + omitted, but if specified must be + ``/instances/``. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_instance_v1.types.Instance` + An isolated set of Cloud Spanner resources on which + databases can be hosted. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, instance_id, instance]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_instance_admin.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instance, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.Instance, + metadata_type=spanner_instance_admin.CreateInstanceMetadata, + ) + + # Done; return the response. + return response + + async def update_instance(self, + request: Union[spanner_instance_admin.UpdateInstanceRequest, dict] = None, + *, + instance: spanner_instance_admin.Instance = None, + field_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an instance, and begins allocating or releasing + resources as requested. 
The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of updating the instance. If the named instance + does not exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance's + allocation has been requested, billing is based on the + newly-requested level. + + Until completion of the returned operation: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], + and begins restoring resources to their pre-request values. + The operation is guaranteed to succeed at undoing all + resource changes, after which point it terminates with a + ``CANCELLED`` status. + - All other attempts to modify the instance are rejected. + - Reading the instance via the API continues to give the + pre-request resource levels. + + Upon completion of the returned operation: + + - Billing begins for all successfully-allocated resources (some + types may have lower than the requested levels). + - All newly-reserved resources are available for serving the + instance's tables. + - The instance's new resource levels are readable via the API. + + The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track the instance modification. The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Instance][google.spanner.admin.instance.v1.Instance], if + successful. + + Authorization requires ``spanner.instances.update`` permission + on resource + [name][google.spanner.admin.instance.v1.Instance.name]. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]): + The request object. 
The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): + Required. The instance to update, which must always + include the instance name. Otherwise, only fields + mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] + need be included. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask specifying which fields in + [Instance][google.spanner.admin.instance.v1.Instance] + should be updated. The field mask must always be + specified; this prevents any future fields in + [Instance][google.spanner.admin.instance.v1.Instance] + from being erased accidentally by clients that do not + know about them. + + This corresponds to the ``field_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_instance_v1.types.Instance` + An isolated set of Cloud Spanner resources on which + databases can be hosted. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([instance, field_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_instance_admin.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if field_mask is not None: + request.field_mask = field_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_instance, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.Instance, + metadata_type=spanner_instance_admin.UpdateInstanceMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance(self, + request: Union[spanner_instance_admin.DeleteInstanceRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an instance. + + Immediately upon completion of the request: + + - Billing ceases for all of the instance's reserved resources. + + Soon afterward: + + - The instance and *all of its databases* immediately and + irrevocably disappear from the API. All data in the databases + is permanently deleted. 
+ + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]): + The request object. The request for + [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. + name (:class:`str`): + Required. The name of the instance to be deleted. Values + are of the form + ``projects//instances/`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = spanner_instance_admin.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instance, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy(self, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, + *, + resource: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on an instance resource. Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for `SetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). 
+ A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, + *, + resource: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for an instance resource. Returns + an empty policy if an instance exists but does not have a policy + set. + + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for `GetIamPolicy` + method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that the caller has on the specified + instance resource. 
+ + Attempting this RPC on a non-existent Cloud Spanner instance + resource will result in a NOT_FOUND error if the user has + ``spanner.instances.list`` permission on the containing Google + Cloud Project. Otherwise returns an empty set of permissions. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for + `TestIamPermissions` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`Sequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-spanner-admin-instance", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "InstanceAdminAsyncClient", +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py new file mode 100644 index 0000000000..9df413e835 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -0,0 +1,1464 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +OptionalRetry = Union[retries.Retry, object] + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import InstanceAdminGrpcTransport +from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport + + +class InstanceAdminClientMeta(type): + """Metaclass for the InstanceAdmin client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] + _transport_registry["grpc"] = InstanceAdminGrpcTransport + _transport_registry["grpc_asyncio"] = InstanceAdminGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[InstanceAdminTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InstanceAdminClient(metaclass=InstanceAdminClientMeta): + """Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). + Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. 
For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "spanner.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceAdminClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstanceAdminTransport: + """Returns the transport used by the client instance. + + Returns: + InstanceAdminTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def instance_path(project: str,instance: str,) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str,str]: + """Parses a instance path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def instance_config_path(project: str,instance_config: str,) -> str: + """Returns a fully-qualified instance_config string.""" + return "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, ) + + @staticmethod + def parse_instance_config_path(path: str) -> Dict[str,str]: + """Parses a instance_config path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instanceConfigs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: 
+ """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, InstanceAdminTransport, None] = None, + client_options: 
Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, InstanceAdminTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, InstanceAdminTransport): + # transport is a InstanceAdminTransport instance. 
+ if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + def list_instance_configs(self, + request: Union[spanner_instance_admin.ListInstanceConfigsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstanceConfigsPager: + r"""Lists the supported instance configurations for a + given project. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]): + The request object. The request for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + parent (str): + Required. The name of the project for which a list of + supported instance configurations is requested. Values + are of the form ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager: + The response for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_instance_admin.ListInstanceConfigsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest): + request = spanner_instance_admin.ListInstanceConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListInstanceConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance_config(self, + request: Union[spanner_instance_admin.GetInstanceConfigRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner_instance_admin.InstanceConfig: + r"""Gets information about a particular instance + configuration. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]): + The request object. The request for + [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. + name (str): + Required. The name of the requested instance + configuration. Values are of the form + ``projects//instanceConfigs/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_instance_v1.types.InstanceConfig: + A possible configuration for a Cloud + Spanner instance. Configurations define + the geographic placement of nodes and + their replication. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_instance_admin.GetInstanceConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest): + request = spanner_instance_admin.GetInstanceConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_instances(self, + request: Union[spanner_instance_admin.ListInstancesRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists all instances in the given project. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]): + The request object. The request for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + parent (str): + Required. The name of the project for which a list of + instances is requested. Values are of the form + ``projects/``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager: + The response for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_instance_admin.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_instance_admin.ListInstancesRequest): + request = spanner_instance_admin.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance(self, + request: Union[spanner_instance_admin.GetInstanceRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> spanner_instance_admin.Instance: + r"""Gets information about a particular instance. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]): + The request object. The request for + [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. + name (str): + Required. The name of the requested instance. Values are + of the form ``projects//instances/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.spanner_admin_instance_v1.types.Instance: + An isolated set of Cloud Spanner + resources on which databases can be + hosted. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_instance_admin.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_instance_admin.GetInstanceRequest): + request = spanner_instance_admin.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_instance(self, + request: Union[spanner_instance_admin.CreateInstanceRequest, dict] = None, + *, + parent: str = None, + instance_id: str = None, + instance: spanner_instance_admin.Instance = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates an instance and begins preparing it to begin serving. + The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of preparing the new instance. The instance name is + assigned by the caller. If the named instance already exists, + ``CreateInstance`` returns ``ALREADY_EXISTS``. 
+ + Immediately upon completion of this request: + + - The instance is readable via the API, with all requested + attributes but no allocated resources. Its state is + ``CREATING``. + + Until completion of the returned operation: + + - Cancelling the operation renders the instance immediately + unreadable via the API. + - The instance can be deleted. + - All other attempts to modify the instance are rejected. + + Upon completion of the returned operation: + + - Billing for all successfully-allocated resources begins (some + types may have lower than the requested levels). + - Databases can be created in the instance. + - The instance's allocated resource levels are readable via the + API. + - The instance's state becomes ``READY``. + + The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track creation of the instance. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Instance][google.spanner.admin.instance.v1.Instance], if + successful. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]): + The request object. The request for + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + parent (str): + Required. The name of the project in which to create the + instance. Values are of the form ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. The ID of the instance to create. Valid + identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` + and must be between 2 and 64 characters in length. 
+ + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.spanner_admin_instance_v1.types.Instance): + Required. The instance to create. The name may be + omitted, but if specified must be + ``/instances/``. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_instance_v1.types.Instance` + An isolated set of Cloud Spanner resources on which + databases can be hosted. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance_id, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_instance_admin.CreateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_instance_admin.CreateInstanceRequest): + request = spanner_instance_admin.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.Instance, + metadata_type=spanner_instance_admin.CreateInstanceMetadata, + ) + + # Done; return the response. + return response + + def update_instance(self, + request: Union[spanner_instance_admin.UpdateInstanceRequest, dict] = None, + *, + instance: spanner_instance_admin.Instance = None, + field_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates an instance, and begins allocating or releasing + resources as requested. The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of updating the instance. If the named instance + does not exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance's + allocation has been requested, billing is based on the + newly-requested level. 
+ + Until completion of the returned operation: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], + and begins restoring resources to their pre-request values. + The operation is guaranteed to succeed at undoing all + resource changes, after which point it terminates with a + ``CANCELLED`` status. + - All other attempts to modify the instance are rejected. + - Reading the instance via the API continues to give the + pre-request resource levels. + + Upon completion of the returned operation: + + - Billing begins for all successfully-allocated resources (some + types may have lower than the requested levels). + - All newly-reserved resources are available for serving the + instance's tables. + - The instance's new resource levels are readable via the API. + + The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track the instance modification. The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Instance][google.spanner.admin.instance.v1.Instance], if + successful. + + Authorization requires ``spanner.instances.update`` permission + on resource + [name][google.spanner.admin.instance.v1.Instance.name]. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]): + The request object. The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + instance (google.cloud.spanner_admin_instance_v1.types.Instance): + Required. The instance to update, which must always + include the instance name. Otherwise, only fields + mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] + need be included. 
+ + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + [Instance][google.spanner.admin.instance.v1.Instance] + should be updated. The field mask must always be + specified; this prevents any future fields in + [Instance][google.spanner.admin.instance.v1.Instance] + from being erased accidentally by clients that do not + know about them. + + This corresponds to the ``field_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_instance_v1.types.Instance` + An isolated set of Cloud Spanner resources on which + databases can be hosted. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, field_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_instance_admin.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest): + request = spanner_instance_admin.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if field_mask is not None: + request.field_mask = field_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.Instance, + metadata_type=spanner_instance_admin.UpdateInstanceMetadata, + ) + + # Done; return the response. + return response + + def delete_instance(self, + request: Union[spanner_instance_admin.DeleteInstanceRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an instance. + + Immediately upon completion of the request: + + - Billing ceases for all of the instance's reserved resources. + + Soon afterward: + + - The instance and *all of its databases* immediately and + irrevocably disappear from the API. All data in the databases + is permanently deleted. + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]): + The request object. The request for + [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. + name (str): + Required. 
The name of the instance to be deleted. Values + are of the form + ``projects//instances/`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a spanner_instance_admin.DeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest): + request = spanner_instance_admin.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy(self, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, + *, + resource: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on an instance resource. Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for `SetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, + *, + resource: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for an instance resource. Returns + an empty policy if an instance exists but does not have a policy + set. + + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for `GetIamPolicy` + method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. 
It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, + *, + resource: str = None, + permissions: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that the caller has on the specified + instance resource. + + Attempting this RPC on a non-existent Cloud Spanner instance + resource will result in a NOT_FOUND error if the user has + ``spanner.instances.list`` permission on the containing Google + Cloud Project. Otherwise returns an empty set of permissions. + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for + `TestIamPermissions` method. 
+ resource (str): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (Sequence[str]): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + if resource is not None: + request.resource = resource + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-spanner-admin-instance", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "InstanceAdminClient", +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py new file mode 100644 index 0000000000..add08e57e4 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin + + +class ListInstanceConfigsPager: + """A pager for iterating through ``list_instance_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instance_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstanceConfigs`` requests and continue to iterate + through the ``instance_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstanceConfigsResponse], + request: spanner_instance_admin.ListInstanceConfigsRequest, + response: spanner_instance_admin.ListInstanceConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): + The initial request object. 
+ response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_instance_admin.InstanceConfig]: + for page in self.pages: + yield from page.instance_configs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstanceConfigsAsyncPager: + """A pager for iterating through ``list_instance_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instance_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstanceConfigs`` requests and continue to iterate + through the ``instance_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]], + request: spanner_instance_admin.ListInstanceConfigsRequest, + response: spanner_instance_admin.ListInstanceConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstanceConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.instance_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstancesResponse], + request: spanner_instance_admin.ListInstancesRequest, + response: spanner_instance_admin.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_instance_admin.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstancesResponse]], + request: spanner_instance_admin.ListInstancesRequest, + response: spanner_instance_admin.ListInstancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = spanner_instance_admin.ListInstancesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py new file mode 100644 index 0000000000..6f2453770b --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstanceAdminTransport +from .grpc import InstanceAdminGrpcTransport +from .grpc_asyncio import InstanceAdminGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] +_transport_registry['grpc'] = InstanceAdminGrpcTransport +_transport_registry['grpc_asyncio'] = InstanceAdminGrpcAsyncIOTransport + +__all__ = ( + 'InstanceAdminTransport', + 'InstanceAdminGrpcTransport', + 'InstanceAdminGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py new file mode 100644 index 0000000000..8d272f2bea --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -0,0 +1,325 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-spanner-admin-instance', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class InstanceAdminTransport(abc.ABC): + """Abstract transport class for InstanceAdmin.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_instance_configs: gapic_v1.method.wrap_method( + self.list_instance_configs, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_instance_config: gapic_v1.method.wrap_method( + self.get_instance_config, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + 
self.update_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( +initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + Union[ + spanner_instance_admin.ListInstanceConfigsResponse, + Awaitable[spanner_instance_admin.ListInstanceConfigsResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + Union[ + spanner_instance_admin.InstanceConfig, + Awaitable[spanner_instance_admin.InstanceConfig] + ]]: + raise NotImplementedError() + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + Union[ + spanner_instance_admin.ListInstancesResponse, + Awaitable[spanner_instance_admin.ListInstancesResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + Union[ + spanner_instance_admin.Instance, + Awaitable[spanner_instance_admin.Instance] + ]]: + raise NotImplementedError() + + @property + def create_instance(self) -> Callable[ + [spanner_instance_admin.CreateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_instance(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise 
NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'InstanceAdminTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py new file mode 100644 index 0000000000..cfe155bdd7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -0,0 +1,638 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO + + +class InstanceAdminGrpcTransport(InstanceAdminTransport): + """gRPC backend transport for InstanceAdmin. + + Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). + Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. 
For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + spanner_instance_admin.ListInstanceConfigsResponse]: + r"""Return a callable for the list instance configs method over gRPC. + + Lists the supported instance configurations for a + given project. + + Returns: + Callable[[~.ListInstanceConfigsRequest], + ~.ListInstanceConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_configs' not in self._stubs: + self._stubs['list_instance_configs'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', + request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, + ) + return self._stubs['list_instance_configs'] + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + spanner_instance_admin.InstanceConfig]: + r"""Return a callable for the get instance config method over gRPC. + + Gets information about a particular instance + configuration. 
+ + Returns: + Callable[[~.GetInstanceConfigRequest], + ~.InstanceConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance_config' not in self._stubs: + self._stubs['get_instance_config'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', + request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, + response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, + ) + return self._stubs['get_instance_config'] + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + spanner_instance_admin.ListInstancesResponse]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in the given project. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instances' not in self._stubs: + self._stubs['list_instances'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', + request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, + ) + return self._stubs['list_instances'] + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + spanner_instance_admin.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets information about a particular instance. 
+ + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance' not in self._stubs: + self._stubs['get_instance'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance', + request_serializer=spanner_instance_admin.GetInstanceRequest.serialize, + response_deserializer=spanner_instance_admin.Instance.deserialize, + ) + return self._stubs['get_instance'] + + @property + def create_instance(self) -> Callable[ + [spanner_instance_admin.CreateInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates an instance and begins preparing it to begin serving. + The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of preparing the new instance. The instance name is + assigned by the caller. If the named instance already exists, + ``CreateInstance`` returns ``ALREADY_EXISTS``. + + Immediately upon completion of this request: + + - The instance is readable via the API, with all requested + attributes but no allocated resources. Its state is + ``CREATING``. + + Until completion of the returned operation: + + - Cancelling the operation renders the instance immediately + unreadable via the API. + - The instance can be deleted. + - All other attempts to modify the instance are rejected. + + Upon completion of the returned operation: + + - Billing for all successfully-allocated resources begins (some + types may have lower than the requested levels). + - Databases can be created in the instance. + - The instance's allocated resource levels are readable via the + API. + - The instance's state becomes ``READY``. 
+
+        The returned [long-running
+        operation][google.longrunning.Operation] will have a name of the
+        format ``<instance_name>/operations/<operation_id>`` and can be
+        used to track creation of the instance. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful.
+
+        Returns:
+            Callable[[~.CreateInstanceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_instance' not in self._stubs:
+            self._stubs['create_instance'] = self.grpc_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance',
+                request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_instance']
+
+    @property
+    def update_instance(self) -> Callable[
+            [spanner_instance_admin.UpdateInstanceRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update instance method over gRPC.
+
+        Updates an instance, and begins allocating or releasing
+        resources as requested. The returned [long-running
+        operation][google.longrunning.Operation] can be used to track
+        the progress of updating the instance. If the named instance
+        does not exist, returns ``NOT_FOUND``.
+
+        Immediately upon completion of this request:
+
+        -  For resource types for which a decrease in the instance's
+           allocation has been requested, billing is based on the
+           newly-requested level.
+
+        Until completion of the returned operation:
+
+        -  Cancelling the operation sets its metadata's
+           [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
+           and begins restoring resources to their pre-request values.
+           The operation is guaranteed to succeed at undoing all
+           resource changes, after which point it terminates with a
+           ``CANCELLED`` status.
+        -  All other attempts to modify the instance are rejected.
+        -  Reading the instance via the API continues to give the
+           pre-request resource levels.
+
+        Upon completion of the returned operation:
+
+        -  Billing begins for all successfully-allocated resources (some
+           types may have lower than the requested levels).
+        -  All newly-reserved resources are available for serving the
+           instance's tables.
+        -  The instance's new resource levels are readable via the API.
+
+        The returned [long-running
+        operation][google.longrunning.Operation] will have a name of the
+        format ``<instance_name>/operations/<operation_id>`` and can be
+        used to track the instance modification. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful.
+
+        Authorization requires ``spanner.instances.update`` permission
+        on resource
+        [name][google.spanner.admin.instance.v1.Instance.name].
+
+        Returns:
+            Callable[[~.UpdateInstanceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if 'update_instance' not in self._stubs: + self._stubs['update_instance'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance', + request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_instance'] + + @property + def delete_instance(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes an instance. + + Immediately upon completion of the request: + + - Billing ceases for all of the instance's reserved resources. + + Soon afterward: + + - The instance and *all of its databases* immediately and + irrevocably disappear from the API. All data in the databases + is permanently deleted. + + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_instance' not in self._stubs: + self._stubs['delete_instance'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance', + request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_instance'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on an instance resource. Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. 
+ + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for an instance resource. Returns + an empty policy if an instance exists but does not have a policy + set. + + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + instance resource. + + Attempting this RPC on a non-existent Cloud Spanner instance + resource will result in a NOT_FOUND error if the user has + ``spanner.instances.list`` permission on the containing Google + Cloud Project. Otherwise returns an empty set of permissions. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + def close(self): + self.grpc_channel.close() + +__all__ = ( + 'InstanceAdminGrpcTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py new file mode 100644 index 0000000000..8787614293 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -0,0 +1,642 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO +from .grpc import InstanceAdminGrpcTransport + + +class InstanceAdminGrpcAsyncIOTransport(InstanceAdminTransport): + """gRPC AsyncIO backend transport for InstanceAdmin. + + Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). + Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. 
For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]]: + r"""Return a callable for the list instance configs method over gRPC. + + Lists the supported instance configurations for a + given project. + + Returns: + Callable[[~.ListInstanceConfigsRequest], + Awaitable[~.ListInstanceConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
        # NOTE(review): this excerpt was recovered from a unified diff; docstring
        # resource-name placeholders (e.g. ``<instance_name>/operations/<operation_id>``)
        # were stripped by the extraction and have been restored from the upstream
        # googleapis protos -- verify against the generator output.
        #
        # Tail of the `list_instance_configs` property; its `def` line and
        # docstring appear above this excerpt.
        if 'list_instance_configs' not in self._stubs:
            self._stubs['list_instance_configs'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs',
                request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize,
                response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize,
            )
        return self._stubs['list_instance_configs']

    @property
    def get_instance_config(self) -> Callable[
            [spanner_instance_admin.GetInstanceConfigRequest],
            Awaitable[spanner_instance_admin.InstanceConfig]]:
        r"""Return a callable for the get instance config method over gRPC.

        Gets information about a particular instance
        configuration.

        Returns:
            Callable[[~.GetInstanceConfigRequest],
                    Awaitable[~.InstanceConfig]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_instance_config' not in self._stubs:
            self._stubs['get_instance_config'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig',
                request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize,
                response_deserializer=spanner_instance_admin.InstanceConfig.deserialize,
            )
        return self._stubs['get_instance_config']

    @property
    def list_instances(self) -> Callable[
            [spanner_instance_admin.ListInstancesRequest],
            Awaitable[spanner_instance_admin.ListInstancesResponse]]:
        r"""Return a callable for the list instances method over gRPC.

        Lists all instances in the given project.

        Returns:
            Callable[[~.ListInstancesRequest],
                    Awaitable[~.ListInstancesResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_instances' not in self._stubs:
            self._stubs['list_instances'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances',
                request_serializer=spanner_instance_admin.ListInstancesRequest.serialize,
                response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize,
            )
        return self._stubs['list_instances']

    @property
    def get_instance(self) -> Callable[
            [spanner_instance_admin.GetInstanceRequest],
            Awaitable[spanner_instance_admin.Instance]]:
        r"""Return a callable for the get instance method over gRPC.

        Gets information about a particular instance.

        Returns:
            Callable[[~.GetInstanceRequest],
                    Awaitable[~.Instance]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_instance' not in self._stubs:
            self._stubs['get_instance'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance',
                request_serializer=spanner_instance_admin.GetInstanceRequest.serialize,
                response_deserializer=spanner_instance_admin.Instance.deserialize,
            )
        return self._stubs['get_instance']

    @property
    def create_instance(self) -> Callable[
            [spanner_instance_admin.CreateInstanceRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the create instance method over gRPC.

        Creates an instance and begins preparing it to begin serving.
        The returned [long-running
        operation][google.longrunning.Operation] can be used to track
        the progress of preparing the new instance. The instance name is
        assigned by the caller. If the named instance already exists,
        ``CreateInstance`` returns ``ALREADY_EXISTS``.

        Immediately upon completion of this request:

        -  The instance is readable via the API, with all requested
           attributes but no allocated resources. Its state is
           ``CREATING``.

        Until completion of the returned operation:

        -  Cancelling the operation renders the instance immediately
           unreadable via the API.
        -  The instance can be deleted.
        -  All other attempts to modify the instance are rejected.

        Upon completion of the returned operation:

        -  Billing for all successfully-allocated resources begins (some
           types may have lower than the requested levels).
        -  Databases can be created in the instance.
        -  The instance's allocated resource levels are readable via the
           API.
        -  The instance's state becomes ``READY``.

        The returned [long-running
        operation][google.longrunning.Operation] will have a name of the
        format ``<instance_name>/operations/<operation_id>`` and can be
        used to track creation of the instance. The
        [metadata][google.longrunning.Operation.metadata] field type is
        [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
        The [response][google.longrunning.Operation.response] field type
        is [Instance][google.spanner.admin.instance.v1.Instance], if
        successful.

        Returns:
            Callable[[~.CreateInstanceRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_instance' not in self._stubs:
            self._stubs['create_instance'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance',
                request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['create_instance']

    @property
    def update_instance(self) -> Callable[
            [spanner_instance_admin.UpdateInstanceRequest],
            Awaitable[operations_pb2.Operation]]:
        r"""Return a callable for the update instance method over gRPC.

        Updates an instance, and begins allocating or releasing
        resources as requested. The returned [long-running
        operation][google.longrunning.Operation] can be used to track
        the progress of updating the instance. If the named instance
        does not exist, returns ``NOT_FOUND``.

        Immediately upon completion of this request:

        -  For resource types for which a decrease in the instance's
           allocation has been requested, billing is based on the
           newly-requested level.

        Until completion of the returned operation:

        -  Cancelling the operation sets its metadata's
           [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
           and begins restoring resources to their pre-request values.
           The operation is guaranteed to succeed at undoing all
           resource changes, after which point it terminates with a
           ``CANCELLED`` status.
        -  All other attempts to modify the instance are rejected.
        -  Reading the instance via the API continues to give the
           pre-request resource levels.

        Upon completion of the returned operation:

        -  Billing begins for all successfully-allocated resources (some
           types may have lower than the requested levels).
        -  All newly-reserved resources are available for serving the
           instance's tables.
        -  The instance's new resource levels are readable via the API.

        The returned [long-running
        operation][google.longrunning.Operation] will have a name of the
        format ``<instance_name>/operations/<operation_id>`` and can be
        used to track the instance modification. The
        [metadata][google.longrunning.Operation.metadata] field type is
        [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
        The [response][google.longrunning.Operation.response] field type
        is [Instance][google.spanner.admin.instance.v1.Instance], if
        successful.

        Authorization requires ``spanner.instances.update`` permission
        on resource
        [name][google.spanner.admin.instance.v1.Instance.name].

        Returns:
            Callable[[~.UpdateInstanceRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_instance' not in self._stubs:
            self._stubs['update_instance'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance',
                request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs['update_instance']

    @property
    def delete_instance(self) -> Callable[
            [spanner_instance_admin.DeleteInstanceRequest],
            Awaitable[empty_pb2.Empty]]:
        r"""Return a callable for the delete instance method over gRPC.

        Deletes an instance.

        Immediately upon completion of the request:

        -  Billing ceases for all of the instance's reserved resources.

        Soon afterward:

        -  The instance and *all of its databases* immediately and
           irrevocably disappear from the API. All data in the databases
           is permanently deleted.

        Returns:
            Callable[[~.DeleteInstanceRequest],
                    Awaitable[~.Empty]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_instance' not in self._stubs:
            self._stubs['delete_instance'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance',
                request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['delete_instance']

    @property
    def set_iam_policy(self) -> Callable[
            [iam_policy_pb2.SetIamPolicyRequest],
            Awaitable[policy_pb2.Policy]]:
        r"""Return a callable for the set iam policy method over gRPC.

        Sets the access control policy on an instance resource. Replaces
        any existing policy.

        Authorization requires ``spanner.instances.setIamPolicy`` on
        [resource][google.iam.v1.SetIamPolicyRequest.resource].

        Returns:
            Callable[[~.SetIamPolicyRequest],
                    Awaitable[~.Policy]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'set_iam_policy' not in self._stubs:
            self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy',
                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
                response_deserializer=policy_pb2.Policy.FromString,
            )
        return self._stubs['set_iam_policy']

    @property
    def get_iam_policy(self) -> Callable[
            [iam_policy_pb2.GetIamPolicyRequest],
            Awaitable[policy_pb2.Policy]]:
        r"""Return a callable for the get iam policy method over gRPC.

        Gets the access control policy for an instance resource. Returns
        an empty policy if an instance exists but does not have a policy
        set.

        Authorization requires ``spanner.instances.getIamPolicy`` on
        [resource][google.iam.v1.GetIamPolicyRequest.resource].

        Returns:
            Callable[[~.GetIamPolicyRequest],
                    Awaitable[~.Policy]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_iam_policy' not in self._stubs:
            self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy',
                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
                response_deserializer=policy_pb2.Policy.FromString,
            )
        return self._stubs['get_iam_policy']

    @property
    def test_iam_permissions(self) -> Callable[
            [iam_policy_pb2.TestIamPermissionsRequest],
            Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]:
        r"""Return a callable for the test iam permissions method over gRPC.

        Returns permissions that the caller has on the specified
        instance resource.

        Attempting this RPC on a non-existent Cloud Spanner instance
        resource will result in a NOT_FOUND error if the user has
        ``spanner.instances.list`` permission on the containing Google
        Cloud Project. Otherwise returns an empty set of permissions.

        Returns:
            Callable[[~.TestIamPermissionsRequest],
                    Awaitable[~.TestIamPermissionsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'test_iam_permissions' not in self._stubs:
            self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary(
                '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions',
                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
            )
        return self._stubs['test_iam_permissions']

    def close(self):
        # Shut down the underlying grpc.aio channel; cached stubs become
        # unusable afterwards.
        return self.grpc_channel.close()


__all__ = (
    'InstanceAdminGrpcAsyncIOTransport',
)
#
# Re-exports the public request/response/resource types of
# ``google.cloud.spanner_admin_instance_v1.types`` so callers can import
# them from the package root.
from .spanner_instance_admin import (
    CreateInstanceMetadata,
    CreateInstanceRequest,
    DeleteInstanceRequest,
    GetInstanceConfigRequest,
    GetInstanceRequest,
    Instance,
    InstanceConfig,
    ListInstanceConfigsRequest,
    ListInstanceConfigsResponse,
    ListInstancesRequest,
    ListInstancesResponse,
    ReplicaInfo,
    UpdateInstanceMetadata,
    UpdateInstanceRequest,
)

__all__ = (
    'CreateInstanceMetadata',
    'CreateInstanceRequest',
    'DeleteInstanceRequest',
    'GetInstanceConfigRequest',
    'GetInstanceRequest',
    'Instance',
    'InstanceConfig',
    'ListInstanceConfigsRequest',
    'ListInstanceConfigsResponse',
    'ListInstancesRequest',
    'ListInstancesResponse',
    'ReplicaInfo',
    'UpdateInstanceMetadata',
    'UpdateInstanceRequest',
)
#
import proto  # type: ignore

from google.protobuf import field_mask_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore

# NOTE(review): this module was recovered from a unified diff. Resource-name
# placeholders (``projects/<project>/...``) and documentation URLs in the
# docstrings were stripped by extraction and have been restored from the
# upstream googleapis protos -- verify against the generator output.

__protobuf__ = proto.module(
    package='google.spanner.admin.instance.v1',
    manifest={
        'ReplicaInfo',
        'InstanceConfig',
        'Instance',
        'ListInstanceConfigsRequest',
        'ListInstanceConfigsResponse',
        'GetInstanceConfigRequest',
        'GetInstanceRequest',
        'CreateInstanceRequest',
        'ListInstancesRequest',
        'ListInstancesResponse',
        'UpdateInstanceRequest',
        'DeleteInstanceRequest',
        'CreateInstanceMetadata',
        'UpdateInstanceMetadata',
    },
)


class ReplicaInfo(proto.Message):
    r"""Describes one replica of an instance configuration: where it is
    served from, its type, and whether it is the default leader location.

    Attributes:
        location (str):
            The location of the serving resources, e.g.
            "us-central1".
        type_ (google.cloud.spanner_admin_instance_v1.types.ReplicaInfo.ReplicaType):
            The type of replica.
        default_leader_location (bool):
            If true, this location is designated as the default leader
            location where leader replicas are placed. See the `region
            types
            documentation <https://cloud.google.com/spanner/docs/instances#region_configs>`__
            for more details.
    """
    class ReplicaType(proto.Enum):
        r"""Indicates the type of replica. See the `replica types
        documentation <https://cloud.google.com/spanner/docs/replication#replica_types>`__
        for more details.
        """
        TYPE_UNSPECIFIED = 0
        READ_WRITE = 1
        READ_ONLY = 2
        WITNESS = 3

    location = proto.Field(
        proto.STRING,
        number=1,
    )
    type_ = proto.Field(
        proto.ENUM,
        number=2,
        enum=ReplicaType,
    )
    default_leader_location = proto.Field(
        proto.BOOL,
        number=3,
    )


class InstanceConfig(proto.Message):
    r"""A possible configuration for a Cloud Spanner instance.
    Configurations define the geographic placement of nodes and
    their replication.

    Attributes:
        name (str):
            A unique identifier for the instance configuration. Values
            are of the form
            ``projects/<project>/instanceConfigs/[a-z][-a-z0-9]*``
        display_name (str):
            The name of this instance configuration as it
            appears in UIs.
        replicas (Sequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]):
            The geographic placement of nodes in this
            instance configuration and their replication
            properties.
        leader_options (Sequence[str]):
            Allowed values of the "default_leader" schema option for
            databases in instances that use this instance configuration.
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )
    display_name = proto.Field(
        proto.STRING,
        number=2,
    )
    replicas = proto.RepeatedField(
        proto.MESSAGE,
        number=3,
        message='ReplicaInfo',
    )
    leader_options = proto.RepeatedField(
        proto.STRING,
        number=4,
    )


class Instance(proto.Message):
    r"""An isolated set of Cloud Spanner resources on which databases
    can be hosted.

    Attributes:
        name (str):
            Required. A unique identifier for the instance, which cannot
            be changed after the instance is created. Values are of the
            form
            ``projects/<project>/instances/[a-z][-a-z0-9]*[a-z0-9]``.
            The final segment of the name must be between 2 and 64
            characters in length.
        config (str):
            Required. The name of the instance's configuration. Values
            are of the form
            ``projects/<project>/instanceConfigs/<configuration>``. See
            also
            [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
            and
            [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
        display_name (str):
            Required. The descriptive name for this
            instance as it appears in UIs. Must be unique
            per project and between 4 and 30 characters in
            length.
        node_count (int):
            Required. The number of nodes allocated to this instance.
            This may be zero in API responses for instances that are not
            yet in state ``READY``.

            See `the
            documentation <https://cloud.google.com/spanner/docs/instances#node_count>`__
            for more information about nodes.
        processing_units (int):
            The number of processing units allocated to this instance.
            At most one of processing_units or node_count should be
            present in the message. This may be zero in API responses
            for instances that are not yet in state ``READY``.
        state (google.cloud.spanner_admin_instance_v1.types.Instance.State):
            Output only. The current instance state. For
            [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance],
            the state must be either omitted or set to ``CREATING``. For
            [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance],
            the state must be either omitted or set to ``READY``.
        labels (Sequence[google.cloud.spanner_admin_instance_v1.types.Instance.LabelsEntry]):
            Cloud Labels are a flexible and lightweight mechanism for
            organizing cloud resources into groups that reflect a
            customer's organizational needs and deployment strategies.
            Cloud Labels can be used to filter collections of resources.
            They can be used to control how resource metrics are
            aggregated. And they can be used as arguments to policy
            management rules (e.g. route, firewall, load balancing,
            etc.).

            -  Label keys must be between 1 and 63 characters long and
               must conform to the following regular expression:
               ``[a-z]([-a-z0-9]*[a-z0-9])?``.
            -  Label values must be between 0 and 63 characters long and
               must conform to the regular expression
               ``([a-z]([-a-z0-9]*[a-z0-9])?)?``.
            -  No more than 64 labels can be associated with a given
               resource.

            See https://goo.gl/xmQnxf for more information on and
            examples of labels.

            If you plan to use labels in your own code, please note that
            additional characters may be allowed in the future. And so
            you are advised to use an internal label representation,
            such as JSON, which doesn't rely upon specific characters
            being disallowed. For example, representing labels as the
            string: name + "_" + value would prove problematic if we
            were to allow "_" in a future release.
        endpoint_uris (Sequence[str]):
            Deprecated. This field is not populated.
    """
    class State(proto.Enum):
        r"""Indicates the current state of the instance."""
        STATE_UNSPECIFIED = 0
        CREATING = 1
        READY = 2

    name = proto.Field(
        proto.STRING,
        number=1,
    )
    config = proto.Field(
        proto.STRING,
        number=2,
    )
    display_name = proto.Field(
        proto.STRING,
        number=3,
    )
    node_count = proto.Field(
        proto.INT32,
        number=5,
    )
    processing_units = proto.Field(
        proto.INT32,
        number=9,
    )
    state = proto.Field(
        proto.ENUM,
        number=6,
        enum=State,
    )
    labels = proto.MapField(
        proto.STRING,
        proto.STRING,
        number=7,
    )
    endpoint_uris = proto.RepeatedField(
        proto.STRING,
        number=8,
    )


class ListInstanceConfigsRequest(proto.Message):
    r"""The request for
    [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].

    Attributes:
        parent (str):
            Required. The name of the project for which a list of
            supported instance configurations is requested. Values are
            of the form ``projects/<project>``.
        page_size (int):
            Number of instance configurations to be
            returned in the response. If 0 or less, defaults
            to the server's maximum allowed page size.
        page_token (str):
            If non-empty, ``page_token`` should contain a
            [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigsResponse.next_page_token]
            from a previous
            [ListInstanceConfigsResponse][google.spanner.admin.instance.v1.ListInstanceConfigsResponse].
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size = proto.Field(
        proto.INT32,
        number=2,
    )
    page_token = proto.Field(
        proto.STRING,
        number=3,
    )


class ListInstanceConfigsResponse(proto.Message):
    r"""The response for
    [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].

    Attributes:
        instance_configs (Sequence[google.cloud.spanner_admin_instance_v1.types.InstanceConfig]):
            The list of requested instance
            configurations.
        next_page_token (str):
            ``next_page_token`` can be sent in a subsequent
            [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]
            call to fetch more of the matching instance configurations.
    """

    @property
    def raw_page(self):
        # Required by the pager machinery: identifies this message as its
        # own page of results.
        return self

    instance_configs = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message='InstanceConfig',
    )
    next_page_token = proto.Field(
        proto.STRING,
        number=2,
    )


class GetInstanceConfigRequest(proto.Message):
    r"""The request for
    [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig].

    Attributes:
        name (str):
            Required. The name of the requested instance configuration.
            Values are of the form
            ``projects/<project>/instanceConfigs/<config>``.
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )


class GetInstanceRequest(proto.Message):
    r"""The request for
    [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance].

    Attributes:
        name (str):
            Required. The name of the requested instance. Values are of
            the form ``projects/<project>/instances/<instance>``.
        field_mask (google.protobuf.field_mask_pb2.FieldMask):
            If field_mask is present, specifies the subset of
            [Instance][google.spanner.admin.instance.v1.Instance] fields
            that should be returned. If absent, all
            [Instance][google.spanner.admin.instance.v1.Instance] fields
            are returned.
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )
    field_mask = proto.Field(
        proto.MESSAGE,
        number=2,
        message=field_mask_pb2.FieldMask,
    )


class CreateInstanceRequest(proto.Message):
    r"""The request for
    [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].

    Attributes:
        parent (str):
            Required. The name of the project in which to create the
            instance. Values are of the form ``projects/<project>``.
        instance_id (str):
            Required. The ID of the instance to create. Valid
            identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and
            must be between 2 and 64 characters in length.
        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
            Required. The instance to create. The name may be omitted,
            but if specified must be
            ``<parent>/instances/<instance_id>``.
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    instance_id = proto.Field(
        proto.STRING,
        number=2,
    )
    instance = proto.Field(
        proto.MESSAGE,
        number=3,
        message='Instance',
    )


class ListInstancesRequest(proto.Message):
    r"""The request for
    [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].

    Attributes:
        parent (str):
            Required. The name of the project for which a list of
            instances is requested. Values are of the form
            ``projects/<project>``.
        page_size (int):
            Number of instances to be returned in the
            response. If 0 or less, defaults to the server's
            maximum allowed page size.
        page_token (str):
            If non-empty, ``page_token`` should contain a
            [next_page_token][google.spanner.admin.instance.v1.ListInstancesResponse.next_page_token]
            from a previous
            [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse].
        filter (str):
            An expression for filtering the results of the request.
            Filter rules are case insensitive. The fields eligible for
            filtering are:

            -  ``name``
            -  ``display_name``
            -  ``labels.key`` where key is the name of a label

            Some examples of using filters are:

            -  ``name:*`` --> The instance has a name.
            -  ``name:Howl`` --> The instance's name contains the string
               "howl".
            -  ``name:HOWL`` --> Equivalent to above.
            -  ``NAME:howl`` --> Equivalent to above.
            -  ``labels.env:*`` --> The instance has the label "env".
            -  ``labels.env:dev`` --> The instance has the label "env"
               and the value of the label contains the string "dev".
            -  ``name:howl labels.env:dev`` --> The instance's name
               contains "howl" and it has the label "env" with its value
               containing "dev".
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size = proto.Field(
        proto.INT32,
        number=2,
    )
    page_token = proto.Field(
        proto.STRING,
        number=3,
    )
    filter = proto.Field(
        proto.STRING,
        number=4,
    )


class ListInstancesResponse(proto.Message):
    r"""The response for
    [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].

    Attributes:
        instances (Sequence[google.cloud.spanner_admin_instance_v1.types.Instance]):
            The list of requested instances.
        next_page_token (str):
            ``next_page_token`` can be sent in a subsequent
            [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]
            call to fetch more of the matching instances.
    """

    @property
    def raw_page(self):
        # Required by the pager machinery: identifies this message as its
        # own page of results.
        return self

    instances = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message='Instance',
    )
    next_page_token = proto.Field(
        proto.STRING,
        number=2,
    )


class UpdateInstanceRequest(proto.Message):
    r"""The request for
    [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance].

    Attributes:
        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
            Required. The instance to update, which must always include
            the instance name. Otherwise, only fields mentioned in
            [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask]
            need be included.
        field_mask (google.protobuf.field_mask_pb2.FieldMask):
            Required. A mask specifying which fields in
            [Instance][google.spanner.admin.instance.v1.Instance] should
            be updated. The field mask must always be specified; this
            prevents any future fields in
            [Instance][google.spanner.admin.instance.v1.Instance] from
            being erased accidentally by clients that do not know about
            them.
    """

    instance = proto.Field(
        proto.MESSAGE,
        number=1,
        message='Instance',
    )
    field_mask = proto.Field(
        proto.MESSAGE,
        number=2,
        message=field_mask_pb2.FieldMask,
    )


class DeleteInstanceRequest(proto.Message):
    r"""The request for
    [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance].

    Attributes:
        name (str):
            Required. The name of the instance to be deleted. Values are
            of the form ``projects/<project>/instances/<instance>``
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )


class CreateInstanceMetadata(proto.Message):
    r"""Metadata type for the operation returned by
    [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].

    Attributes:
        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
            The instance being created.
        start_time (google.protobuf.timestamp_pb2.Timestamp):
            The time at which the
            [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]
            request was received.
        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
            The time at which this operation was
            cancelled. If set, this operation is in the
            process of undoing itself (which is guaranteed
            to succeed) and cannot be cancelled again.
        end_time (google.protobuf.timestamp_pb2.Timestamp):
            The time at which this operation failed or
            was completed successfully.
    """

    instance = proto.Field(
        proto.MESSAGE,
        number=1,
        message='Instance',
    )
    start_time = proto.Field(
        proto.MESSAGE,
        number=2,
        message=timestamp_pb2.Timestamp,
    )
    cancel_time = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    end_time = proto.Field(
        proto.MESSAGE,
        number=4,
        message=timestamp_pb2.Timestamp,
    )


class UpdateInstanceMetadata(proto.Message):
    r"""Metadata type for the operation returned by
    [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance].

    Attributes:
        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
            The desired end state of the update.
        start_time (google.protobuf.timestamp_pb2.Timestamp):
            The time at which
            [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]
            request was received.
        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
            The time at which this operation was
            cancelled. If set, this operation is in the
            process of undoing itself (which is guaranteed
            to succeed) and cannot be cancelled again.
        end_time (google.protobuf.timestamp_pb2.Timestamp):
            The time at which this operation failed or
            was completed successfully.
    """

    instance = proto.Field(
        proto.MESSAGE,
        number=1,
        message='Instance',
    )
    start_time = proto.Field(
        proto.MESSAGE,
        number=2,
        message=timestamp_pb2.Timestamp,
    )
    cancel_time = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    end_time = proto.Field(
        proto.MESSAGE,
        number=4,
        message=timestamp_pb2.Timestamp,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Nox configuration for the generated spanner_admin_instance package:
# defines the unit / cover / mypy / lower-bounds / docs sessions.
import os
import pathlib
import shutil
import subprocess
import sys


import nox  # type: ignore

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
# Package name is resolved from setup.py so the lower-bound sessions stay
# in sync with whatever the package is actually called.
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")


nox.sessions = [
    "unit",
    "cover",
    "mypy",
    # BUG FIX: the generated file omitted the comma after "check_lower_bounds",
    # so Python's implicit string concatenation merged it with "docs" into the
    # single nonexistent session name "check_lower_boundsdocs", silently
    # dropping both sessions from the default run.
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
]

@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10'])
def unit(session):
    """Run the unit test suite with coverage enabled."""

    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
    session.install('-e', '.')

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/spanner_admin_instance_v1/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs))
    )


@nox.session(python='3.9')
def cover(session):
    """Run the final coverage report.
    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=100")

    session.run("coverage", "erase")


@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def mypy(session):
    """Run the type checker."""
    session.install('mypy', 'types-pkg_resources')
    session.install('.')
    session.run(
        'mypy',
        '--explicit-package-bases',
        'google',
    )


@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py"""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'update',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session
def check_lower_bounds(session):
    """Check lower bounds in setup.py are reflected in constraints file"""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'check',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )

@nox.session(python='3.9')
def docs(session):
    """Build the docs for this library."""

    session.install("-e", ".")
    session.install("sphinx<3.0.0", "alabaster", "recommonmark")

    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
    session.run(
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b",
        "html",
        "-d",
        os.path.join("docs", "_build", "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join("docs", "_build", "html", ""),
    )
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns:
        (trueList, falseList): items for which *predicate* was truthy,
        then items for which it was falsy, each preserving input order.
    """
    results: Tuple[List[Any], List[Any]] = ([], [])

    for i in iterator:
        # bool -> int indexing: falsy items land in results[0], truthy in results[1].
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class spanner_admin_instanceCallTransformer(cst.CSTTransformer):
    """Rewrites flattened client method calls to use a single ``request`` dict.

    Fix: the original annotations used ``Tuple[str]`` (a one-element tuple
    type); these are variadic tuples, so ``Tuple[str, ...]`` is correct.
    """
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'create_instance': ('parent', 'instance_id', 'instance', ),
        'delete_instance': ('name', ),
        'get_iam_policy': ('resource', 'options', ),
        'get_instance': ('name', 'field_mask', ),
        'get_instance_config': ('name', ),
        'list_instance_configs': ('parent', 'page_size', 'page_token', ),
        'list_instances': ('parent', 'page_size', 'page_token', 'filter', ),
        'set_iam_policy': ('resource', 'policy', ),
        'test_iam_permissions': ('resource', 'permissions', ),
        'update_instance': ('instance', 'field_mask', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a flattened API call into the request-object form."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the method's flattened params must be the
        # control params (retry/timeout/metadata), passed positionally.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=spanner_admin_instanceCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory

    Note: the default ``transformer`` instance is shared across calls; it is
    stateless here, so that is safe.
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the spanner_admin_instance client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import setuptools  # type: ignore

version = '0.1.0'

package_root = os.path.abspath(os.path.dirname(__file__))

# Use the package README as the long description shown on PyPI.
# Python 3's builtin open() accepts an encoding, so io.open is unnecessary.
readme_filename = os.path.join(package_root, 'README.rst')
with open(readme_filename, encoding='utf-8') as readme_file:
    readme = readme_file.read()

setuptools.setup(
    name='google-cloud-spanner-admin-instance',
    version=version,
    long_description=readme,
    packages=setuptools.PEP420PackageFinder.find(),
    namespace_packages=('google', 'google.cloud'),
    platforms='Posix; MacOS X; Windows',
    include_package_data=True,
    install_requires=(
        'google-api-core[grpc] >= 1.28.0, < 3.0.0dev',
        'libcst >= 0.2.5',
        'proto-plus >= 1.19.4',
        'grpc-google-iam-v1 >= 0.12.3, < 0.13dev',
    ),
    python_requires='>=3.6',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    zip_safe=False,
)
a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py new file mode 100644 index 0000000000..b54a5fcc42 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py new file mode 100644 index 0000000000..6af4075fb9 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -0,0 +1,3779 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import os
import mock

import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule


from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async  # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminAsyncClient
from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminClient
from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers
from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports
from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import options_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2  # type: ignore
from google.type import expr_pb2  # type: ignore
import google.auth


def client_cert_source_callback():
    """Return a dummy (cert, key) pair used as an mTLS client cert source."""
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT


def test__get_default_mtls_endpoint():
    """Verify the googleapis.com -> mtls.googleapis.com endpoint mapping."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # Non-Google endpoints must pass through unchanged.
    assert InstanceAdminClient._get_default_mtls_endpoint(None) is None
    assert InstanceAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert InstanceAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert InstanceAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class", [
    InstanceAdminClient,
    InstanceAdminAsyncClient,
])
def test_instance_admin_client_from_service_account_info(client_class):
    """from_service_account_info() builds a client from an in-memory dict."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == 'spanner.googleapis.com:443'


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.InstanceAdminGrpcTransport, "grpc"),
    (transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_instance_admin_client_service_account_always_use_jwt(transport_class, transport_name):
    """The transport toggles self-signed JWT on the credentials only when asked."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class", [
    InstanceAdminClient,
    InstanceAdminAsyncClient,
])
def test_instance_admin_client_from_service_account_file(client_class):
    """Both file-based constructors load credentials via the mocked factory."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        # from_service_account_json is an alias of from_service_account_file.
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == 'spanner.googleapis.com:443'


def test_instance_admin_client_get_transport_class():
    """get_transport_class() defaults to gRPC and resolves by name."""
    transport = InstanceAdminClient.get_transport_class()
    available_transports = [
        transports.InstanceAdminGrpcTransport,
    ]
    assert transport in available_transports

    transport = InstanceAdminClient.get_transport_class("grpc")
    assert transport == transports.InstanceAdminGrpcTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"),
    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(InstanceAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminClient))
@mock.patch.object(InstanceAdminAsyncClient, "DEFAULT_ENDPOINT",
                   modify_default_endpoint(InstanceAdminAsyncClient))
def test_instance_admin_client_client_options(client_class, transport_class, transport_name):
    """Exercise client construction across transport/endpoint client options."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class()

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "true"),
    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "false"),
    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport,
     "grpc_asyncio", "false"),
])
@mock.patch.object(InstanceAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminClient))
@mock.patch.object(InstanceAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_instance_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """Verify endpoint/cert auto-switching under GOOGLE_API_USE_CLIENT_CERTIFICATE."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name, client_options=options)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"),
    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_instance_admin_client_client_options_scopes(client_class, transport_class, transport_name):
    """Scopes from client_options must be forwarded to the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"),
    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_instance_admin_client_client_options_credentials_file(client_class, transport_class, transport_name):
    """A credentials file from client_options must be forwarded to the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )


def test_instance_admin_client_client_options_from_dict():
    """client_options may be given as a plain dict instead of ClientOptions."""
    with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminGrpcTransport.__init__') as grpc_transport:
        grpc_transport.return_value = None
        client = InstanceAdminClient(
            client_options={'api_endpoint': 'squid.clam.whelk'}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )


def test_list_instance_configs(transport: str = 'grpc', request_type=spanner_instance_admin.ListInstanceConfigsRequest):
    """list_instance_configs forwards the request and wraps the response in a pager."""
    client = InstanceAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_instance_configs),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_instance_configs(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListInstanceConfigsPager)
    assert response.next_page_token == 'next_page_token_value'


def test_list_instance_configs_from_dict():
    """The request may be passed as a plain dict."""
    test_list_instance_configs(request_type=dict)


def test_list_instance_configs_empty_call():
    """Calling with no request at all still sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = InstanceAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_instance_configs),
            '__call__') as call:
        client.list_instance_configs()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest()


@pytest.mark.asyncio
async def test_list_instance_configs_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigsRequest):
    """Async variant of test_list_instance_configs."""
    client = InstanceAdminAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_instance_configs),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse(
            next_page_token='next_page_token_value',
        ))
        response = await client.list_instance_configs(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListInstanceConfigsAsyncPager)
    assert response.next_page_token == 'next_page_token_value'


@pytest.mark.asyncio
async def test_list_instance_configs_async_from_dict():
    """Async variant: the request may be passed as a plain dict."""
    await test_list_instance_configs_async(request_type=dict)


def test_list_instance_configs_field_headers():
    """Routing metadata (x-goog-request-params) must carry the parent field."""
    client = InstanceAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = spanner_instance_admin.ListInstanceConfigsRequest()

    request.parent = 'parent/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_instance_configs),
            '__call__') as call:
        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
        client.list_instance_configs(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']


@pytest.mark.asyncio
async def test_list_instance_configs_field_headers_async():
    """Async variant of the field-headers routing test."""
    client = InstanceAdminAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = spanner_instance_admin.ListInstanceConfigsRequest()

    request.parent = 'parent/value'

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_instance_configs),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse())
        await client.list_instance_configs(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']


def test_list_instance_configs_flattened():
    """Flattened (keyword) arguments are folded into the request object."""
    client = InstanceAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_instance_configs),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_instance_configs(
            parent='parent_value',
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].parent == 'parent_value'


def test_list_instance_configs_flattened_error():
    """Passing both a request object and flattened fields must raise."""
    client = InstanceAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.list_instance_configs( + spanner_instance_admin.ListInstanceConfigsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_instance_configs_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instance_configs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_instance_configs_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instance_configs( + spanner_instance_admin.ListInstanceConfigsRequest(), + parent='parent_value', + ) + + +def test_list_instance_configs_pager(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_instance_configs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, spanner_instance_admin.InstanceConfig) + for i in results) + +def test_list_instance_configs_pages(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instance_configs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instance_configs_async_pager(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_configs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_instance_admin.InstanceConfig) + for i in responses) + +@pytest.mark.asyncio +async def test_list_instance_configs_async_pages(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_instance_configs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_instance_config(transport: str = 'grpc', request_type=spanner_instance_admin.GetInstanceConfigRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.InstanceConfig( + name='name_value', + display_name='display_name_value', + leader_options=['leader_options_value'], + ) + response = client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_instance_admin.InstanceConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.leader_options == ['leader_options_value'] + + +def test_get_instance_config_from_dict(): + test_get_instance_config(request_type=dict) + + +def test_get_instance_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + client.get_instance_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceConfigRequest() + + +@pytest.mark.asyncio +async def test_get_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig( + name='name_value', + display_name='display_name_value', + leader_options=['leader_options_value'], + )) + response = await client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_instance_admin.InstanceConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.leader_options == ['leader_options_value'] + + +@pytest.mark.asyncio +async def test_get_instance_config_async_from_dict(): + await test_get_instance_config_async(request_type=dict) + + +def test_get_instance_config_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstanceConfigRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + call.return_value = spanner_instance_admin.InstanceConfig() + client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_config_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstanceConfigRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig()) + await client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_instance_config_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.InstanceConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_instance_config_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance_config( + spanner_instance_admin.GetInstanceConfigRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_instance_config_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.InstanceConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_instance_config_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance_config( + spanner_instance_admin.GetInstanceConfigRequest(), + name='name_value', + ) + + +def test_list_instances(transport: str = 'grpc', request_type=spanner_instance_admin.ListInstancesRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_instances_from_dict(): + test_list_instances(request_type=dict) + + +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancesRequest): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = spanner_instance_admin.ListInstancesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.ListInstancesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse()) + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_instances_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_instances_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + spanner_instance_admin.ListInstancesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instances( + spanner_instance_admin.ListInstancesRequest(), + parent='parent_value', + ) + + +def test_list_instances_pager(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_instances(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, spanner_instance_admin.Instance) + for i in results) + +def test_list_instances_pages(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, 
and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_instance_admin.Instance) + for i in responses) + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_instances(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_instance(transport: str = 'grpc', request_type=spanner_instance_admin.GetInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.Instance( + name='name_value', + config='config_value', + display_name='display_name_value', + node_count=1070, + processing_units=1743, + state=spanner_instance_admin.Instance.State.CREATING, + endpoint_uris=['endpoint_uris_value'], + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_instance_admin.Instance) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.node_count == 1070 + assert response.processing_units == 1743 + assert response.state == spanner_instance_admin.Instance.State.CREATING + assert response.endpoint_uris == ['endpoint_uris_value'] + + +def test_get_instance_from_dict(): + test_get_instance(request_type=dict) + + +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance( + name='name_value', + config='config_value', + display_name='display_name_value', + node_count=1070, + processing_units=1743, + state=spanner_instance_admin.Instance.State.CREATING, + endpoint_uris=['endpoint_uris_value'], + )) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_instance_admin.Instance) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.node_count == 1070 + assert response.processing_units == 1743 + assert response.state == spanner_instance_admin.Instance.State.CREATING + assert response.endpoint_uris == ['endpoint_uris_value'] + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = spanner_instance_admin.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_instance_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + spanner_instance_admin.GetInstanceRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance( + spanner_instance_admin.GetInstanceRequest(), + name='name_value', + ) + + +def test_create_instance(transport: str = 'grpc', request_type=spanner_instance_admin.CreateInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_from_dict(): + test_create_instance(request_type=dict) + + +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstanceRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstanceRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent='parent_value', + instance_id='instance_id_value', + instance=spanner_instance_admin.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].instance_id == 'instance_id_value' + assert args[0].instance == spanner_instance_admin.Instance(name='name_value') + + +def test_create_instance_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + spanner_instance_admin.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=spanner_instance_admin.Instance(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_instance( + parent='parent_value', + instance_id='instance_id_value', + instance=spanner_instance_admin.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].instance_id == 'instance_id_value' + assert args[0].instance == spanner_instance_admin.Instance(name='name_value') + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + spanner_instance_admin.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=spanner_instance_admin.Instance(name='name_value'), + ) + + +def test_update_instance(transport: str = 'grpc', request_type=spanner_instance_admin.UpdateInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_from_dict(): + test_update_instance(request_type=dict) + + +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.UpdateInstanceRequest() + + +@pytest.mark.asyncio +async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceRequest() + + request.instance.name = 'instance.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=instance.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceRequest() + + request.instance.name = 'instance.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=instance.name/value', + ) in kw['metadata'] + + +def test_update_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].instance == spanner_instance_admin.Instance(name='name_value') + assert args[0].field_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +def test_update_instance_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + spanner_instance_admin.UpdateInstanceRequest(), + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_instance( + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].instance == spanner_instance_admin.Instance(name='name_value') + assert args[0].field_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_instance( + spanner_instance_admin.UpdateInstanceRequest(), + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_instance(transport: str = 'grpc', request_type=spanner_instance_admin.DeleteInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_instance_from_dict(): + test_delete_instance(request_type=dict) + + +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = None + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_instance_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + spanner_instance_admin.DeleteInstanceRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_instance( + spanner_instance_admin.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_from_dict(): + test_set_iam_policy(request_type=dict) + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = policy_pb2.Policy()
+        response = client.set_iam_policy(request={
+            'resource': 'resource_value',
+            'policy': policy_pb2.Policy(version=774),
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].resource == 'resource_value'
+
+
+def test_set_iam_policy_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call (wrapped in a
+        # fake call object so the async client can await it).
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].resource == 'resource_value'
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+        response = client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.GetIamPolicyRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+def test_get_iam_policy_from_dict():
+    test_get_iam_policy(request_type=dict)
+
+
+def test_get_iam_policy_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        client.get_iam_policy()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.GetIamPolicyRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
+    client = InstanceAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call; the async
+        # client awaits the stub, so the response is wrapped in a fake call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.GetIamPolicyRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_get_iam_policy_from_dict_foreign(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy(request={ + 'resource': 'resource_value', + 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource == 'resource_value' + + +def test_get_iam_policy_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call (wrapped in a
+        # fake call object so the async client can await it).
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].resource == 'resource_value'
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_from_dict(): + test_test_iam_permissions(request_type=dict) + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call; the async
+        # client awaits the stub, so the response is wrapped in a fake call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        ))
+        response = await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.TestIamPermissionsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+    await test_test_iam_permissions_async(request_type=dict)
+
+
+def test_test_iam_permissions_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    request.resource = 'resource/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource/value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.test_iam_permissions(
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].resource == 'resource_value'
+        assert args[0].permissions == ['permissions_value']
+
+
+def test_test_iam_permissions_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call (wrapped in a
+        # fake call object so the async client can await it).
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.test_iam_permissions(
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].resource == 'resource_value'
+        assert args[0].permissions == ['permissions_value']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+ transport = transports.InstanceAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InstanceAdminClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.InstanceAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.InstanceAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.InstanceAdminGrpcTransport, + ) + +def test_instance_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstanceAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_instance_admin_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InstanceAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_instance_configs', + 'get_instance_config', + 'list_instances', + 'get_instance', + 'create_instance', + 'update_instance', + 'delete_instance', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +def test_instance_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id="octopus", + ) + + +def test_instance_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceAdminTransport() + adc.assert_called_once() + + +def test_instance_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstanceAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminGrpcAsyncIOTransport, + ], +) +def test_instance_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.InstanceAdminGrpcTransport, grpc_helpers), + (transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_instance_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + scopes=["1", "2"], + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) +def test_instance_admin_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+def test_instance_admin_host_no_port():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'),
+    )
+    assert client.transport._host == 'spanner.googleapis.com:443'
+
+
+def test_instance_admin_host_with_port():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'),
+    )
+    assert client.transport._host == 'spanner.googleapis.com:8000'
+
+def test_instance_admin_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.InstanceAdminGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    # Compare to None with ``is`` (PEP 8 / E711), not ``==``.
+    assert transport._ssl_channel_credentials is None
+
+
+def test_instance_admin_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+ transport = transports.InstanceAdminGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) +def test_instance_admin_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from 
# grpc/grpc_asyncio transport constructor.  (comment continued from the
# previous chunk)
@pytest.mark.parametrize(
    "transport_class",
    [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport],
)
def test_instance_admin_transport_channel_mtls_with_adc(transport_class):
    """mTLS channel creation falls back to ADC-provided SSL credentials.

    With no ``client_cert_source``, the deprecated ``api_mtls_endpoint`` path
    must pull SSL credentials from ``google.auth.transport.grpc.SslCredentials``.
    """
    adc_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=adc_ssl_cred),
    ):
        with mock.patch.object(transport_class, "create_channel") as patched_create:
            fake_channel = mock.Mock()
            patched_create.return_value = fake_channel
            fake_cred = mock.Mock()

            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=fake_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            patched_create.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=fake_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=adc_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == fake_channel


def test_instance_admin_grpc_lro_client():
    """The sync transport lazily builds exactly one operations client."""
    client = InstanceAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsClient)

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client


def test_instance_admin_grpc_lro_async_client():
    """The async transport lazily builds exactly one operations client."""
    client = InstanceAdminAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc_asyncio',
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient)

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client


def test_instance_path():
    """``instance_path`` renders the canonical resource template."""
    project = "squid"
    instance = "clam"
    expected = f"projects/{project}/instances/{instance}"
    assert InstanceAdminClient.instance_path(project, instance) == expected


def test_parse_instance_path():
    """``parse_instance_path`` inverts ``instance_path``."""
    expected = {
        "project": "whelk",
        "instance": "octopus",
    }
    path = InstanceAdminClient.instance_path(**expected)

    # Check that the path construction is reversible.
    assert InstanceAdminClient.parse_instance_path(path) == expected


def test_instance_config_path():
    """``instance_config_path`` renders the canonical resource template."""
    project = "oyster"
    instance_config = "nudibranch"
    expected = f"projects/{project}/instanceConfigs/{instance_config}"
    assert InstanceAdminClient.instance_config_path(project, instance_config) == expected


def test_parse_instance_config_path():
    """``parse_instance_config_path`` inverts ``instance_config_path``."""
    expected = {
        "project": "cuttlefish",
        "instance_config": "mussel",
    }
    path = InstanceAdminClient.instance_config_path(**expected)

    # Check that the path construction is reversible.
    assert InstanceAdminClient.parse_instance_config_path(path) == expected


def test_common_billing_account_path():
    """``common_billing_account_path`` renders the canonical template."""
    billing_account = "winkle"
    expected = f"billingAccounts/{billing_account}"
    assert InstanceAdminClient.common_billing_account_path(billing_account) == expected


def test_parse_common_billing_account_path():
    """``parse_common_billing_account_path`` inverts the path builder."""
    expected = {
        "billing_account": "nautilus",
    }
    path = InstanceAdminClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
+ actual = InstanceAdminClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = InstanceAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = InstanceAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InstanceAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = InstanceAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = InstanceAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = InstanceAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceAdminClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InstanceAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = InstanceAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: + transport_class = InstanceAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with 
mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() From 55873ba66b2843286c477211eae8010c4cb82301 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Sat, 30 Oct 2021 02:02:19 +0000 Subject: [PATCH 2/4] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../services/database_admin/async_client.py | 114 +- .../services/database_admin/client.py | 126 +- .../services/database_admin/pagers.py | 36 +- .../database_admin/transports/base.py | 48 +- .../database_admin/transports/grpc.py | 11 +- .../database_admin/transports/grpc_asyncio.py | 12 +- .../spanner_admin_database_v1/types/backup.py | 3 + .../spanner_admin_database_v1/types/common.py | 1 + .../types/spanner_database_admin.py | 14 + .../services/instance_admin/async_client.py | 70 +- .../services/instance_admin/client.py | 82 +- .../services/instance_admin/pagers.py | 20 +- .../instance_admin/transports/base.py | 48 +- .../instance_admin/transports/grpc.py | 11 +- .../instance_admin/transports/grpc_asyncio.py | 12 +- .../types/spanner_instance_admin.py | 1 + .../services/spanner/async_client.py | 100 +- .../spanner_v1/services/spanner/client.py | 112 +- .../spanner_v1/services/spanner/pagers.py | 12 +- .../services/spanner/transports/base.py | 46 +- .../services/spanner/transports/grpc.py | 9 +- .../spanner/transports/grpc_asyncio.py | 10 +- 
.../cloud/spanner_v1/types/commit_response.py | 2 + google/cloud/spanner_v1/types/keys.py | 11 + google/cloud/spanner_v1/types/mutation.py | 13 + google/cloud/spanner_v1/types/result_set.py | 9 + google/cloud/spanner_v1/types/spanner.py | 16 + google/cloud/spanner_v1/types/transaction.py | 38 +- google/cloud/spanner_v1/types/type.py | 1 + owl-bot-staging/spanner/v1/.coveragerc | 17 - owl-bot-staging/spanner/v1/MANIFEST.in | 2 - owl-bot-staging/spanner/v1/README.rst | 49 - owl-bot-staging/spanner/v1/docs/conf.py | 376 -- owl-bot-staging/spanner/v1/docs/index.rst | 7 - .../spanner/v1/docs/spanner_v1/services.rst | 6 - .../spanner/v1/docs/spanner_v1/spanner.rst | 10 - .../spanner/v1/docs/spanner_v1/types.rst | 7 - .../v1/google/cloud/spanner/__init__.py | 97 - .../spanner/v1/google/cloud/spanner/py.typed | 2 - .../v1/google/cloud/spanner_v1/__init__.py | 98 - .../cloud/spanner_v1/gapic_metadata.json | 173 - .../v1/google/cloud/spanner_v1/py.typed | 2 - .../cloud/spanner_v1/services/__init__.py | 15 - .../spanner_v1/services/spanner/__init__.py | 22 - .../services/spanner/async_client.py | 1486 ----- .../spanner_v1/services/spanner/client.py | 1621 ----- .../spanner_v1/services/spanner/pagers.py | 140 - .../services/spanner/transports/__init__.py | 33 - .../services/spanner/transports/base.py | 425 -- .../services/spanner/transports/grpc.py | 756 --- .../spanner/transports/grpc_asyncio.py | 760 --- .../google/cloud/spanner_v1/types/__init__.py | 108 - .../cloud/spanner_v1/types/commit_response.py | 75 - .../v1/google/cloud/spanner_v1/types/keys.py | 240 - .../google/cloud/spanner_v1/types/mutation.py | 192 - .../cloud/spanner_v1/types/query_plan.py | 200 - .../cloud/spanner_v1/types/result_set.py | 316 - .../google/cloud/spanner_v1/types/spanner.py | 1263 ---- .../cloud/spanner_v1/types/transaction.py | 596 -- .../v1/google/cloud/spanner_v1/types/type.py | 137 - owl-bot-staging/spanner/v1/mypy.ini | 3 - owl-bot-staging/spanner/v1/noxfile.py | 132 - 
.../v1/scripts/fixup_spanner_v1_keywords.py | 190 - owl-bot-staging/spanner/v1/setup.py | 54 - owl-bot-staging/spanner/v1/tests/__init__.py | 16 - .../spanner/v1/tests/unit/__init__.py | 16 - .../spanner/v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/spanner_v1/__init__.py | 16 - .../unit/gapic/spanner_v1/test_spanner.py | 4046 ------------ .../spanner_admin_database/v1/.coveragerc | 17 - .../spanner_admin_database/v1/MANIFEST.in | 2 - .../spanner_admin_database/v1/README.rst | 49 - .../spanner_admin_database/v1/docs/conf.py | 376 -- .../spanner_admin_database/v1/docs/index.rst | 7 - .../database_admin.rst | 10 - .../spanner_admin_database_v1/services.rst | 6 - .../docs/spanner_admin_database_v1/types.rst | 7 - .../cloud/spanner_admin_database/__init__.py | 91 - .../cloud/spanner_admin_database/py.typed | 2 - .../spanner_admin_database_v1/__init__.py | 92 - .../gapic_metadata.json | 193 - .../cloud/spanner_admin_database_v1/py.typed | 2 - .../services/__init__.py | 15 - .../services/database_admin/__init__.py | 22 - .../services/database_admin/async_client.py | 1968 ------ .../services/database_admin/client.py | 2116 ------ .../services/database_admin/pagers.py | 508 -- .../database_admin/transports/__init__.py | 33 - .../database_admin/transports/base.py | 467 -- .../database_admin/transports/grpc.py | 802 --- .../database_admin/transports/grpc_asyncio.py | 806 --- .../types/__init__.py | 92 - .../spanner_admin_database_v1/types/backup.py | 638 -- .../spanner_admin_database_v1/types/common.py | 117 - .../types/spanner_database_admin.py | 826 --- .../spanner_admin_database/v1/mypy.ini | 3 - .../spanner_admin_database/v1/noxfile.py | 132 - ...ixup_spanner_admin_database_v1_keywords.py | 192 - .../spanner_admin_database/v1/setup.py | 55 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../spanner_admin_database_v1/__init__.py | 16 - .../test_database_admin.py | 5880 ----------------- 
.../spanner_admin_instance/v1/.coveragerc | 17 - .../spanner_admin_instance/v1/MANIFEST.in | 2 - .../spanner_admin_instance/v1/README.rst | 49 - .../spanner_admin_instance/v1/docs/conf.py | 376 -- .../spanner_admin_instance/v1/docs/index.rst | 7 - .../instance_admin.rst | 10 - .../spanner_admin_instance_v1/services.rst | 6 - .../docs/spanner_admin_instance_v1/types.rst | 7 - .../cloud/spanner_admin_instance/__init__.py | 51 - .../cloud/spanner_admin_instance/py.typed | 2 - .../spanner_admin_instance_v1/__init__.py | 52 - .../gapic_metadata.json | 123 - .../cloud/spanner_admin_instance_v1/py.typed | 2 - .../services/__init__.py | 15 - .../services/instance_admin/__init__.py | 22 - .../services/instance_admin/async_client.py | 1301 ---- .../services/instance_admin/client.py | 1464 ---- .../services/instance_admin/pagers.py | 262 - .../instance_admin/transports/__init__.py | 33 - .../instance_admin/transports/base.py | 325 - .../instance_admin/transports/grpc.py | 638 -- .../instance_admin/transports/grpc_asyncio.py | 642 -- .../types/__init__.py | 48 - .../types/spanner_instance_admin.py | 605 -- .../spanner_admin_instance/v1/mypy.ini | 3 - .../spanner_admin_instance/v1/noxfile.py | 132 - ...ixup_spanner_admin_instance_v1_keywords.py | 185 - .../spanner_admin_instance/v1/setup.py | 55 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../spanner_admin_instance_v1/__init__.py | 16 - .../test_instance_admin.py | 3779 ----------- ...ixup_spanner_admin_database_v1_keywords.py | 36 +- ...ixup_spanner_admin_instance_v1_keywords.py | 22 +- scripts/fixup_spanner_v1_keywords.py | 32 +- .../test_database_admin.py | 150 +- .../test_instance_admin.py | 150 +- tests/unit/gapic/spanner_v1/test_spanner.py | 147 +- 143 files changed, 772 insertions(+), 40340 deletions(-) delete mode 100644 owl-bot-staging/spanner/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner/v1/MANIFEST.in delete mode 100644 
owl-bot-staging/spanner/v1/README.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner/v1/docs/index.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/services.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/types.rst delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py delete mode 100644 
owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py delete mode 100644 owl-bot-staging/spanner/v1/mypy.ini delete mode 100644 owl-bot-staging/spanner/v1/noxfile.py delete mode 100644 owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py delete mode 100644 owl-bot-staging/spanner/v1/setup.py delete mode 100644 owl-bot-staging/spanner/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner_admin_database/v1/MANIFEST.in delete mode 100644 owl-bot-staging/spanner_admin_database/v1/README.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/index.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/mypy.ini delete mode 100644 owl-bot-staging/spanner_admin_database/v1/noxfile.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/setup.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/README.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/index.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed delete mode 100644 
owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/mypy.ini delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/noxfile.py delete mode 100644 
owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/setup.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py index d9178c81a4..c2983b61ed 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.spanner_admin_database_v1.services.database_admin import pagers @@ -190,17 +192,17 @@ def __init__( async def list_databases( self, - request: spanner_database_admin.ListDatabasesRequest = None, + request: 
Union[spanner_database_admin.ListDatabasesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListDatabasesAsyncPager: r"""Lists Cloud Spanner databases. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): The request object. The request for [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. parent (:class:`str`): @@ -281,11 +283,11 @@ async def list_databases( async def create_database( self, - request: spanner_database_admin.CreateDatabaseRequest = None, + request: Union[spanner_database_admin.CreateDatabaseRequest, dict] = None, *, parent: str = None, create_statement: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -301,7 +303,7 @@ async def create_database( successful. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]): The request object. The request for [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. parent (:class:`str`): @@ -388,17 +390,17 @@ async def create_database( async def get_database( self, - request: spanner_database_admin.GetDatabaseRequest = None, + request: Union[spanner_database_admin.GetDatabaseRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner_database_admin.Database: r"""Gets the state of a Cloud Spanner database. 
Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]): The request object. The request for [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. name (:class:`str`): @@ -468,11 +470,11 @@ async def get_database( async def update_database_ddl( self, - request: spanner_database_admin.UpdateDatabaseDdlRequest = None, + request: Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict] = None, *, database: str = None, statements: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -487,7 +489,7 @@ async def update_database_ddl( The operation has no response. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): The request object. Enqueues the given DDL statements to be applied, in order but not necessarily all at once, to the database schema at some point (or points) in the @@ -603,10 +605,10 @@ async def update_database_ddl( async def drop_database( self, - request: spanner_database_admin.DropDatabaseRequest = None, + request: Union[spanner_database_admin.DropDatabaseRequest, dict] = None, *, database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -615,7 +617,7 @@ async def drop_database( ``expire_time``. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): The request object. 
The request for [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. database (:class:`str`): @@ -677,10 +679,10 @@ async def drop_database( async def get_database_ddl( self, - request: spanner_database_admin.GetDatabaseDdlRequest = None, + request: Union[spanner_database_admin.GetDatabaseDdlRequest, dict] = None, *, database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner_database_admin.GetDatabaseDdlResponse: @@ -690,7 +692,7 @@ async def get_database_ddl( [Operations][google.longrunning.Operations] API. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): The request object. The request for [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. database (:class:`str`): @@ -762,10 +764,10 @@ async def get_database_ddl( async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -780,7 +782,7 @@ async def set_iam_policy( [resource][google.iam.v1.SetIamPolicyRequest.resource]. Args: - request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): The request object. Request message for `SetIamPolicy` method. 
resource (:class:`str`): @@ -896,10 +898,10 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -915,7 +917,7 @@ async def get_iam_policy( [resource][google.iam.v1.GetIamPolicyRequest.resource]. Args: - request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): The request object. Request message for `GetIamPolicy` method. resource (:class:`str`): @@ -1041,11 +1043,11 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, *, resource: str = None, permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: @@ -1061,7 +1063,7 @@ async def test_iam_permissions( permission on the containing instance. Args: - request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): The request object. Request message for `TestIamPermissions` method. 
resource (:class:`str`): @@ -1133,12 +1135,12 @@ async def test_iam_permissions( async def create_backup( self, - request: gsad_backup.CreateBackupRequest = None, + request: Union[gsad_backup.CreateBackupRequest, dict] = None, *, parent: str = None, backup: gsad_backup.Backup = None, backup_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -1157,7 +1159,7 @@ async def create_backup( databases can run concurrently. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.CreateBackupRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]): The request object. The request for [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. parent (:class:`str`): @@ -1252,10 +1254,10 @@ async def create_backup( async def get_backup( self, - request: backup.GetBackupRequest = None, + request: Union[backup.GetBackupRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> backup.Backup: @@ -1263,7 +1265,7 @@ async def get_backup( [Backup][google.spanner.admin.database.v1.Backup]. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.GetBackupRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]): The request object. The request for [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. 
name (:class:`str`): @@ -1332,11 +1334,11 @@ async def get_backup( async def update_backup( self, - request: gsad_backup.UpdateBackupRequest = None, + request: Union[gsad_backup.UpdateBackupRequest, dict] = None, *, backup: gsad_backup.Backup = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gsad_backup.Backup: @@ -1344,7 +1346,7 @@ async def update_backup( [Backup][google.spanner.admin.database.v1.Backup]. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): The request object. The request for [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): @@ -1433,10 +1435,10 @@ async def update_backup( async def delete_backup( self, - request: backup.DeleteBackupRequest = None, + request: Union[backup.DeleteBackupRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1444,7 +1446,7 @@ async def delete_backup( [Backup][google.spanner.admin.database.v1.Backup]. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): The request object. The request for [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. 
name (:class:`str`): @@ -1509,10 +1511,10 @@ async def delete_backup( async def list_backups( self, - request: backup.ListBackupsRequest = None, + request: Union[backup.ListBackupsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBackupsAsyncPager: @@ -1521,7 +1523,7 @@ async def list_backups( the most recent ``create_time``. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.ListBackupsRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): The request object. The request for [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. parent (:class:`str`): @@ -1601,12 +1603,12 @@ async def list_backups( async def restore_database( self, - request: spanner_database_admin.RestoreDatabaseRequest = None, + request: Union[spanner_database_admin.RestoreDatabaseRequest, dict] = None, *, parent: str = None, database_id: str = None, backup: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -1631,7 +1633,7 @@ async def restore_database( first restore to complete. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]): The request object. The request for [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. 
parent (:class:`str`): @@ -1728,10 +1730,12 @@ async def restore_database( async def list_database_operations( self, - request: spanner_database_admin.ListDatabaseOperationsRequest = None, + request: Union[ + spanner_database_admin.ListDatabaseOperationsRequest, dict + ] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListDatabaseOperationsAsyncPager: @@ -1747,7 +1751,7 @@ async def list_database_operations( operations. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]): The request object. The request for [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. parent (:class:`str`): @@ -1828,10 +1832,10 @@ async def list_database_operations( async def list_backup_operations( self, - request: backup.ListBackupOperationsRequest = None, + request: Union[backup.ListBackupOperationsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBackupOperationsAsyncPager: @@ -1849,7 +1853,7 @@ async def list_backup_operations( order starting from the most recently started operation. Args: - request (:class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest`): + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]): The request object. The request for [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. parent (:class:`str`): @@ -1928,6 +1932,12 @@ async def list_backup_operations( # Done; return the response. 
return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py index 1100d160c5..8d4765268b 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.spanner_admin_database_v1.services.database_admin import pagers @@ -435,25 +437,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_databases( self, - request: spanner_database_admin.ListDatabasesRequest = None, + request: Union[spanner_database_admin.ListDatabasesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListDatabasesPager: r"""Lists Cloud 
Spanner databases. Args: - request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): The request object. The request for [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. parent (str): @@ -524,11 +523,11 @@ def list_databases( def create_database( self, - request: spanner_database_admin.CreateDatabaseRequest = None, + request: Union[spanner_database_admin.CreateDatabaseRequest, dict] = None, *, parent: str = None, create_statement: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -544,7 +543,7 @@ def create_database( successful. Args: - request (google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]): The request object. The request for [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. parent (str): @@ -631,17 +630,17 @@ def create_database( def get_database( self, - request: spanner_database_admin.GetDatabaseRequest = None, + request: Union[spanner_database_admin.GetDatabaseRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner_database_admin.Database: r"""Gets the state of a Cloud Spanner database. Args: - request (google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]): The request object. The request for [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. 
name (str): @@ -701,11 +700,11 @@ def get_database( def update_database_ddl( self, - request: spanner_database_admin.UpdateDatabaseDdlRequest = None, + request: Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict] = None, *, database: str = None, statements: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -720,7 +719,7 @@ def update_database_ddl( The operation has no response. Args: - request (google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): The request object. Enqueues the given DDL statements to be applied, in order but not necessarily all at once, to the database schema at some point (or points) in the @@ -826,10 +825,10 @@ def update_database_ddl( def drop_database( self, - request: spanner_database_admin.DropDatabaseRequest = None, + request: Union[spanner_database_admin.DropDatabaseRequest, dict] = None, *, database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -838,7 +837,7 @@ def drop_database( ``expire_time``. Args: - request (google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): The request object. The request for [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. 
database (str): @@ -890,10 +889,10 @@ def drop_database( def get_database_ddl( self, - request: spanner_database_admin.GetDatabaseDdlRequest = None, + request: Union[spanner_database_admin.GetDatabaseDdlRequest, dict] = None, *, database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner_database_admin.GetDatabaseDdlResponse: @@ -903,7 +902,7 @@ def get_database_ddl( [Operations][google.longrunning.Operations] API. Args: - request (google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): The request object. The request for [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. database (str): @@ -965,10 +964,10 @@ def get_database_ddl( def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -983,7 +982,7 @@ def set_iam_policy( [resource][google.iam.v1.SetIamPolicyRequest.resource]. Args: - request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): The request object. Request message for `SetIamPolicy` method. 
resource (str): @@ -1098,10 +1097,10 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1117,7 +1116,7 @@ def get_iam_policy( [resource][google.iam.v1.GetIamPolicyRequest.resource]. Args: - request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): The request object. Request message for `GetIamPolicy` method. resource (str): @@ -1232,11 +1231,11 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, *, resource: str = None, permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: @@ -1252,7 +1251,7 @@ def test_iam_permissions( permission on the containing instance. Args: - request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): The request object. Request message for `TestIamPermissions` method. 
resource (str): @@ -1323,12 +1322,12 @@ def test_iam_permissions( def create_backup( self, - request: gsad_backup.CreateBackupRequest = None, + request: Union[gsad_backup.CreateBackupRequest, dict] = None, *, parent: str = None, backup: gsad_backup.Backup = None, backup_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -1347,7 +1346,7 @@ def create_backup( databases can run concurrently. Args: - request (google.cloud.spanner_admin_database_v1.types.CreateBackupRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]): The request object. The request for [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. parent (str): @@ -1442,10 +1441,10 @@ def create_backup( def get_backup( self, - request: backup.GetBackupRequest = None, + request: Union[backup.GetBackupRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> backup.Backup: @@ -1453,7 +1452,7 @@ def get_backup( [Backup][google.spanner.admin.database.v1.Backup]. Args: - request (google.cloud.spanner_admin_database_v1.types.GetBackupRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]): The request object. The request for [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. 
name (str): @@ -1512,11 +1511,11 @@ def get_backup( def update_backup( self, - request: gsad_backup.UpdateBackupRequest = None, + request: Union[gsad_backup.UpdateBackupRequest, dict] = None, *, backup: gsad_backup.Backup = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gsad_backup.Backup: @@ -1524,7 +1523,7 @@ def update_backup( [Backup][google.spanner.admin.database.v1.Backup]. Args: - request (google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): The request object. The request for [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. backup (google.cloud.spanner_admin_database_v1.types.Backup): @@ -1603,10 +1602,10 @@ def update_backup( def delete_backup( self, - request: backup.DeleteBackupRequest = None, + request: Union[backup.DeleteBackupRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1614,7 +1613,7 @@ def delete_backup( [Backup][google.spanner.admin.database.v1.Backup]. Args: - request (google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): The request object. The request for [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. 
name (str): @@ -1669,10 +1668,10 @@ def delete_backup( def list_backups( self, - request: backup.ListBackupsRequest = None, + request: Union[backup.ListBackupsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBackupsPager: @@ -1681,7 +1680,7 @@ def list_backups( the most recent ``create_time``. Args: - request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): The request object. The request for [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. parent (str): @@ -1751,12 +1750,12 @@ def list_backups( def restore_database( self, - request: spanner_database_admin.RestoreDatabaseRequest = None, + request: Union[spanner_database_admin.RestoreDatabaseRequest, dict] = None, *, parent: str = None, database_id: str = None, backup: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -1781,7 +1780,7 @@ def restore_database( first restore to complete. Args: - request (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]): The request object. The request for [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. 
parent (str): @@ -1878,10 +1877,12 @@ def restore_database( def list_database_operations( self, - request: spanner_database_admin.ListDatabaseOperationsRequest = None, + request: Union[ + spanner_database_admin.ListDatabaseOperationsRequest, dict + ] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListDatabaseOperationsPager: @@ -1897,7 +1898,7 @@ def list_database_operations( operations. Args: - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]): The request object. The request for [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. parent (str): @@ -1970,10 +1971,10 @@ def list_database_operations( def list_backup_operations( self, - request: backup.ListBackupOperationsRequest = None, + request: Union[backup.ListBackupOperationsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBackupOperationsPager: @@ -1991,7 +1992,7 @@ def list_backup_operations( order starting from the most recently started operation. Args: - request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]): The request object. The request for [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. parent (str): @@ -2060,6 +2061,19 @@ def list_backup_operations( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. 
+ + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py index 552f761751..a14ed07855 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.spanner_admin_database_v1.types import backup @@ -76,14 +76,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[spanner_database_admin.ListDatabasesResponse]: + def pages(self) -> Iterator[spanner_database_admin.ListDatabasesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[spanner_database_admin.Database]: + def __iter__(self) -> Iterator[spanner_database_admin.Database]: for page in self.pages: yield from page.databases @@ -140,14 +140,14 @@ def __getattr__(self, name: str) -> Any: @property async def pages( self, - ) -> AsyncIterable[spanner_database_admin.ListDatabasesResponse]: + ) -> AsyncIterator[spanner_database_admin.ListDatabasesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> 
AsyncIterable[spanner_database_admin.Database]: + def __aiter__(self) -> AsyncIterator[spanner_database_admin.Database]: async def async_generator(): async for page in self.pages: for response in page.databases: @@ -206,14 +206,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[backup.ListBackupsResponse]: + def pages(self) -> Iterator[backup.ListBackupsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[backup.Backup]: + def __iter__(self) -> Iterator[backup.Backup]: for page in self.pages: yield from page.backups @@ -268,14 +268,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[backup.ListBackupsResponse]: + async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[backup.Backup]: + def __aiter__(self) -> AsyncIterator[backup.Backup]: async def async_generator(): async for page in self.pages: for response in page.backups: @@ -334,14 +334,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[spanner_database_admin.ListDatabaseOperationsResponse]: + def pages(self) -> Iterator[spanner_database_admin.ListDatabaseOperationsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> 
Iterable[operations_pb2.Operation]: + def __iter__(self) -> Iterator[operations_pb2.Operation]: for page in self.pages: yield from page.operations @@ -400,14 +400,14 @@ def __getattr__(self, name: str) -> Any: @property async def pages( self, - ) -> AsyncIterable[spanner_database_admin.ListDatabaseOperationsResponse]: + ) -> AsyncIterator[spanner_database_admin.ListDatabaseOperationsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[operations_pb2.Operation]: + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: async def async_generator(): async for page in self.pages: for response in page.operations: @@ -466,14 +466,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[backup.ListBackupOperationsResponse]: + def pages(self) -> Iterator[backup.ListBackupOperationsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[operations_pb2.Operation]: + def __iter__(self) -> Iterator[operations_pb2.Operation]: for page in self.pages: yield from page.operations @@ -528,14 +528,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[backup.ListBackupOperationsResponse]: + async def pages(self) -> AsyncIterator[backup.ListBackupOperationsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> 
AsyncIterable[operations_pb2.Operation]: + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: async def async_generator(): async for page in self.pages: for response in page.operations: diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py index ec8cafa77f..61c4af22ab 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -44,15 +43,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class DatabaseAdminTransport(abc.ABC): """Abstract transport class for DatabaseAdmin.""" @@ -105,7 +95,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -127,7 +117,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -138,29 +128,6 @@ def __init__( # Save the credentials. 
self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -363,8 +330,17 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py index 00c46cf906..4c8cb7d7f2 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -92,16 +92,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
@@ -122,7 +122,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -815,5 +815,8 @@ def list_backup_operations( ) return self._stubs["list_backup_operations"] + def close(self): + self.grpc_channel.close() + __all__ = ("DatabaseAdminGrpcTransport",) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py index 49832746ea..32c692486d 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -21,7 +21,6 @@ from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -139,16 +138,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. 
+ for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. @@ -169,7 +168,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -833,5 +832,8 @@ def list_backup_operations( ) return self._stubs["list_backup_operations"] + def close(self): + return self.grpc_channel.close() + __all__ = ("DatabaseAdminGrpcAsyncIOTransport",) diff --git a/google/cloud/spanner_admin_database_v1/types/backup.py b/google/cloud/spanner_admin_database_v1/types/backup.py index 0ddc815570..486503f344 100644 --- a/google/cloud/spanner_admin_database_v1/types/backup.py +++ b/google/cloud/spanner_admin_database_v1/types/backup.py @@ -42,6 +42,7 @@ class Backup(proto.Message): r"""A backup of a Cloud Spanner database. + Attributes: database (str): Required for the @@ -461,6 +462,7 @@ def raw_page(self): class BackupInfo(proto.Message): r"""Information about a backup. + Attributes: backup (str): Name of the backup. @@ -491,6 +493,7 @@ class BackupInfo(proto.Message): class CreateBackupEncryptionConfig(proto.Message): r"""Encryption configuration for the backup to create. + Attributes: encryption_type (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig.EncryptionType): Required. The encryption type of the backup. 
diff --git a/google/cloud/spanner_admin_database_v1/types/common.py b/google/cloud/spanner_admin_database_v1/types/common.py index 38020dcd4e..b0c47fdb66 100644 --- a/google/cloud/spanner_admin_database_v1/types/common.py +++ b/google/cloud/spanner_admin_database_v1/types/common.py @@ -47,6 +47,7 @@ class OperationProgress(proto.Message): class EncryptionConfig(proto.Message): r"""Encryption configuration for a Cloud Spanner database. + Attributes: kms_key_name (str): The Cloud KMS key to be used for encrypting and decrypting diff --git a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py index e7aee2ac1e..5680983dee 100644 --- a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py +++ b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py @@ -55,12 +55,16 @@ class RestoreSourceType(proto.Enum): class RestoreInfo(proto.Message): r"""Information about the database restore. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): The type of the restore source. backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): Information about the backup used to restore the database. The backup may no longer exist. + This field is a member of `oneof`_ ``source_info``. """ source_type = proto.Field(proto.ENUM, number=1, enum="RestoreSourceType",) @@ -71,6 +75,7 @@ class RestoreInfo(proto.Message): class Database(proto.Message): r"""A Cloud Spanner database. + Attributes: name (str): Required. The name of the database. Values are of the form @@ -510,6 +515,9 @@ class RestoreDatabaseRequest(proto.Message): r"""The request for [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: parent (str): Required. The name of the instance in which to create the @@ -527,6 +535,7 @@ class RestoreDatabaseRequest(proto.Message): Name of the backup from which to restore. Values are of the form ``projects//instances//backups/``. + This field is a member of `oneof`_ ``source``. encryption_config (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig): Optional. An encryption configuration describing the encryption type and key resources in Cloud KMS used to @@ -547,6 +556,7 @@ class RestoreDatabaseRequest(proto.Message): class RestoreDatabaseEncryptionConfig(proto.Message): r"""Encryption configuration for the restored database. + Attributes: encryption_type (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig.EncryptionType): Required. The encryption type of the restored @@ -575,6 +585,9 @@ class RestoreDatabaseMetadata(proto.Message): r"""Metadata type for the long-running operation returned by [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Name of the database being created and @@ -584,6 +597,7 @@ class RestoreDatabaseMetadata(proto.Message): backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): Information about the backup used to restore the database. + This field is a member of `oneof`_ ``source_info``. 
progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): The progress of the [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase] diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py index 2b52431771..019e8f0f48 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers @@ -193,10 +195,10 @@ def __init__( async def list_instance_configs( self, - request: spanner_instance_admin.ListInstanceConfigsRequest = None, + request: Union[spanner_instance_admin.ListInstanceConfigsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstanceConfigsAsyncPager: @@ -204,7 +206,7 @@ async def list_instance_configs( given project. 
Args: - request (:class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest`): + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]): The request object. The request for [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. parent (:class:`str`): @@ -285,10 +287,10 @@ async def list_instance_configs( async def get_instance_config( self, - request: spanner_instance_admin.GetInstanceConfigRequest = None, + request: Union[spanner_instance_admin.GetInstanceConfigRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner_instance_admin.InstanceConfig: @@ -296,7 +298,7 @@ async def get_instance_config( configuration. Args: - request (:class:`google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest`): + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]): The request object. The request for [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. name (:class:`str`): @@ -370,17 +372,17 @@ async def get_instance_config( async def list_instances( self, - request: spanner_instance_admin.ListInstancesRequest = None, + request: Union[spanner_instance_admin.ListInstancesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists all instances in the given project. Args: - request (:class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest`): + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]): The request object. 
The request for [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. parent (:class:`str`): @@ -461,17 +463,17 @@ async def list_instances( async def get_instance( self, - request: spanner_instance_admin.GetInstanceRequest = None, + request: Union[spanner_instance_admin.GetInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner_instance_admin.Instance: r"""Gets information about a particular instance. Args: - request (:class:`google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest`): + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]): The request object. The request for [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. name (:class:`str`): @@ -543,12 +545,12 @@ async def get_instance( async def create_instance( self, - request: spanner_instance_admin.CreateInstanceRequest = None, + request: Union[spanner_instance_admin.CreateInstanceRequest, dict] = None, *, parent: str = None, instance_id: str = None, instance: spanner_instance_admin.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -592,7 +594,7 @@ async def create_instance( successful. Args: - request (:class:`google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest`): + request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]): The request object. The request for [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. 
parent (:class:`str`): @@ -685,11 +687,11 @@ async def create_instance( async def update_instance( self, - request: spanner_instance_admin.UpdateInstanceRequest = None, + request: Union[spanner_instance_admin.UpdateInstanceRequest, dict] = None, *, instance: spanner_instance_admin.Instance = None, field_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -740,7 +742,7 @@ async def update_instance( [name][google.spanner.admin.instance.v1.Instance.name]. Args: - request (:class:`google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest`): + request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]): The request object. The request for [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): @@ -832,10 +834,10 @@ async def update_instance( async def delete_instance( self, - request: spanner_instance_admin.DeleteInstanceRequest = None, + request: Union[spanner_instance_admin.DeleteInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -852,7 +854,7 @@ async def delete_instance( is permanently deleted. Args: - request (:class:`google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest`): + request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]): The request object. The request for [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. 
name (:class:`str`): @@ -917,10 +919,10 @@ async def delete_instance( async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -931,7 +933,7 @@ async def set_iam_policy( [resource][google.iam.v1.SetIamPolicyRequest.resource]. Args: - request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): The request object. Request message for `SetIamPolicy` method. resource (:class:`str`): @@ -1047,10 +1049,10 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1062,7 +1064,7 @@ async def get_iam_policy( [resource][google.iam.v1.GetIamPolicyRequest.resource]. Args: - request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): The request object. Request message for `GetIamPolicy` method. 
resource (:class:`str`): @@ -1188,11 +1190,11 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, *, resource: str = None, permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: @@ -1205,7 +1207,7 @@ async def test_iam_permissions( Cloud Project. Otherwise returns an empty set of permissions. Args: - request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): The request object. Request message for `TestIamPermissions` method. resource (:class:`str`): @@ -1275,6 +1277,12 @@ async def test_iam_permissions( # Done; return the response. return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py index 2f6187e0a2..9847a2d3f9 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore 
+OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers @@ -381,18 +383,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_instance_configs( self, - request: spanner_instance_admin.ListInstanceConfigsRequest = None, + request: Union[spanner_instance_admin.ListInstanceConfigsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstanceConfigsPager: @@ -400,7 +399,7 @@ def list_instance_configs( given project. Args: - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]): The request object. The request for [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. parent (str): @@ -471,10 +470,10 @@ def list_instance_configs( def get_instance_config( self, - request: spanner_instance_admin.GetInstanceConfigRequest = None, + request: Union[spanner_instance_admin.GetInstanceConfigRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner_instance_admin.InstanceConfig: @@ -482,7 +481,7 @@ def get_instance_config( configuration. 
Args: - request (google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest): + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]): The request object. The request for [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. name (str): @@ -546,17 +545,17 @@ def get_instance_config( def list_instances( self, - request: spanner_instance_admin.ListInstancesRequest = None, + request: Union[spanner_instance_admin.ListInstancesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists all instances in the given project. Args: - request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]): The request object. The request for [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. parent (str): @@ -627,17 +626,17 @@ def list_instances( def get_instance( self, - request: spanner_instance_admin.GetInstanceRequest = None, + request: Union[spanner_instance_admin.GetInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner_instance_admin.Instance: r"""Gets information about a particular instance. Args: - request (google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest): + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]): The request object. The request for [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. 
name (str): @@ -699,12 +698,12 @@ def get_instance( def create_instance( self, - request: spanner_instance_admin.CreateInstanceRequest = None, + request: Union[spanner_instance_admin.CreateInstanceRequest, dict] = None, *, parent: str = None, instance_id: str = None, instance: spanner_instance_admin.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -748,7 +747,7 @@ def create_instance( successful. Args: - request (google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest): + request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]): The request object. The request for [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. parent (str): @@ -841,11 +840,11 @@ def create_instance( def update_instance( self, - request: spanner_instance_admin.UpdateInstanceRequest = None, + request: Union[spanner_instance_admin.UpdateInstanceRequest, dict] = None, *, instance: spanner_instance_admin.Instance = None, field_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -896,7 +895,7 @@ def update_instance( [name][google.spanner.admin.instance.v1.Instance.name]. Args: - request (google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest): + request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]): The request object. The request for [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. 
instance (google.cloud.spanner_admin_instance_v1.types.Instance): @@ -988,10 +987,10 @@ def update_instance( def delete_instance( self, - request: spanner_instance_admin.DeleteInstanceRequest = None, + request: Union[spanner_instance_admin.DeleteInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1008,7 +1007,7 @@ def delete_instance( is permanently deleted. Args: - request (google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest): + request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]): The request object. The request for [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. name (str): @@ -1063,10 +1062,10 @@ def delete_instance( def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1077,7 +1076,7 @@ def set_iam_policy( [resource][google.iam.v1.SetIamPolicyRequest.resource]. Args: - request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): The request object. Request message for `SetIamPolicy` method. 
resource (str): @@ -1192,10 +1191,10 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1207,7 +1206,7 @@ def get_iam_policy( [resource][google.iam.v1.GetIamPolicyRequest.resource]. Args: - request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): The request object. Request message for `GetIamPolicy` method. resource (str): @@ -1322,11 +1321,11 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, *, resource: str = None, permissions: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: @@ -1339,7 +1338,7 @@ def test_iam_permissions( Cloud Project. Otherwise returns an empty set of permissions. Args: - request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): The request object. Request message for `TestIamPermissions` method. resource (str): @@ -1408,6 +1407,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py index ba00792d47..670978ab27 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[spanner_instance_admin.ListInstanceConfigsResponse]: + def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[spanner_instance_admin.InstanceConfig]: + def __iter__(self) -> Iterator[spanner_instance_admin.InstanceConfig]: for page in self.pages: yield from page.instance_configs @@ -140,14 +140,14 @@ def __getattr__(self, name: str) -> Any: @property async def pages( self, - ) -> AsyncIterable[spanner_instance_admin.ListInstanceConfigsResponse]: + ) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[spanner_instance_admin.InstanceConfig]: + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstanceConfig]: async def 
async_generator(): async for page in self.pages: for response in page.instance_configs: @@ -206,14 +206,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[spanner_instance_admin.ListInstancesResponse]: + def pages(self) -> Iterator[spanner_instance_admin.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[spanner_instance_admin.Instance]: + def __iter__(self) -> Iterator[spanner_instance_admin.Instance]: for page in self.pages: yield from page.instances @@ -270,14 +270,14 @@ def __getattr__(self, name: str) -> Any: @property async def pages( self, - ) -> AsyncIterable[spanner_instance_admin.ListInstancesResponse]: + ) -> AsyncIterator[spanner_instance_admin.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[spanner_instance_admin.Instance]: + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.Instance]: async def async_generator(): async for page in self.pages: for response in page.instances: diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py index 78ff62b585..6eb4ecfa29 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: 
ignore @@ -42,15 +41,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class InstanceAdminTransport(abc.ABC): """Abstract transport class for InstanceAdmin.""" @@ -103,7 +93,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -125,7 +115,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -136,29 +126,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -268,8 +235,17 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py index 6f2c4caa6e..efd82a03a3 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -105,16 +105,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. 
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. @@ -135,7 +135,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -651,5 +651,8 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def close(self): + self.grpc_channel.close() + __all__ = ("InstanceAdminGrpcTransport",) diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py index 3e573e71c0..15854b790e 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -21,7 +21,6 @@ from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import 
SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -152,16 +151,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
@@ -182,7 +181,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -661,5 +660,8 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def close(self): + return self.grpc_channel.close() + __all__ = ("InstanceAdminGrpcAsyncIOTransport",) diff --git a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py index e55a5961b0..51d4fbcc25 100644 --- a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py +++ b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py @@ -42,6 +42,7 @@ class ReplicaInfo(proto.Message): r""" + Attributes: location (str): The location of the serving resources, e.g. 
diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index 6b8e199b8f..508e120d0e 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.spanner_v1.services.spanner import pagers from google.cloud.spanner_v1.types import commit_response from google.cloud.spanner_v1.types import mutation @@ -167,10 +169,10 @@ def __init__( async def create_session( self, - request: spanner.CreateSessionRequest = None, + request: Union[spanner.CreateSessionRequest, dict] = None, *, database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.Session: @@ -195,7 +197,7 @@ async def create_session( periodically, e.g., ``"SELECT 1"``. Args: - request (:class:`google.cloud.spanner_v1.types.CreateSessionRequest`): + request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): The request object. The request for [CreateSession][google.spanner.v1.Spanner.CreateSession]. 
database (:class:`str`): @@ -263,11 +265,11 @@ async def create_session( async def batch_create_sessions( self, - request: spanner.BatchCreateSessionsRequest = None, + request: Union[spanner.BatchCreateSessionsRequest, dict] = None, *, database: str = None, session_count: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.BatchCreateSessionsResponse: @@ -277,7 +279,7 @@ async def batch_create_sessions( practices on session cache management. Args: - request (:class:`google.cloud.spanner_v1.types.BatchCreateSessionsRequest`): + request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): The request object. The request for [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. database (:class:`str`): @@ -361,10 +363,10 @@ async def batch_create_sessions( async def get_session( self, - request: spanner.GetSessionRequest = None, + request: Union[spanner.GetSessionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.Session: @@ -373,7 +375,7 @@ async def get_session( is still alive. Args: - request (:class:`google.cloud.spanner_v1.types.GetSessionRequest`): + request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): The request object. The request for [GetSession][google.spanner.v1.Spanner.GetSession]. 
name (:class:`str`): @@ -441,17 +443,17 @@ async def get_session( async def list_sessions( self, - request: spanner.ListSessionsRequest = None, + request: Union[spanner.ListSessionsRequest, dict] = None, *, database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSessionsAsyncPager: r"""Lists all sessions in a given database. Args: - request (:class:`google.cloud.spanner_v1.types.ListSessionsRequest`): + request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): The request object. The request for [ListSessions][google.spanner.v1.Spanner.ListSessions]. database (:class:`str`): @@ -530,10 +532,10 @@ async def list_sessions( async def delete_session( self, - request: spanner.DeleteSessionRequest = None, + request: Union[spanner.DeleteSessionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -542,7 +544,7 @@ async def delete_session( of any operations that are running with this session. Args: - request (:class:`google.cloud.spanner_v1.types.DeleteSessionRequest`): + request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): The request object. The request for [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. name (:class:`str`): @@ -605,9 +607,9 @@ async def delete_session( async def execute_sql( self, - request: spanner.ExecuteSqlRequest = None, + request: Union[spanner.ExecuteSqlRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> result_set.ResultSet: @@ -627,7 +629,7 @@ async def execute_sql( instead. 
Args: - request (:class:`google.cloud.spanner_v1.types.ExecuteSqlRequest`): + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): The request object. The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. @@ -677,9 +679,9 @@ async def execute_sql( def execute_streaming_sql( self, - request: spanner.ExecuteSqlRequest = None, + request: Union[spanner.ExecuteSqlRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: @@ -691,7 +693,7 @@ def execute_streaming_sql( column value can exceed 10 MiB. Args: - request (:class:`google.cloud.spanner_v1.types.ExecuteSqlRequest`): + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): The request object. The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. @@ -735,9 +737,9 @@ def execute_streaming_sql( async def execute_batch_dml( self, - request: spanner.ExecuteBatchDmlRequest = None, + request: Union[spanner.ExecuteBatchDmlRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.ExecuteBatchDmlResponse: @@ -757,7 +759,7 @@ async def execute_batch_dml( statements are not executed. Args: - request (:class:`google.cloud.spanner_v1.types.ExecuteBatchDmlRequest`): + request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): The request object. The request for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -843,9 +845,9 @@ async def execute_batch_dml( async def read( self, - request: spanner.ReadRequest = None, + request: Union[spanner.ReadRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> result_set.ResultSet: @@ -866,7 +868,7 @@ async def read( instead. Args: - request (:class:`google.cloud.spanner_v1.types.ReadRequest`): + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): The request object. The request for [Read][google.spanner.v1.Spanner.Read] and [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. @@ -916,9 +918,9 @@ async def read( def streaming_read( self, - request: spanner.ReadRequest = None, + request: Union[spanner.ReadRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: @@ -930,7 +932,7 @@ def streaming_read( exceed 10 MiB. Args: - request (:class:`google.cloud.spanner_v1.types.ReadRequest`): + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): The request object. The request for [Read][google.spanner.v1.Spanner.Read] and [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. @@ -974,11 +976,11 @@ def streaming_read( async def begin_transaction( self, - request: spanner.BeginTransactionRequest = None, + request: Union[spanner.BeginTransactionRequest, dict] = None, *, session: str = None, options: transaction.TransactionOptions = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> transaction.Transaction: @@ -989,7 +991,7 @@ async def begin_transaction( transaction as a side-effect. 
Args: - request (:class:`google.cloud.spanner_v1.types.BeginTransactionRequest`): + request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): The request object. The request for [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. session (:class:`str`): @@ -1066,13 +1068,13 @@ async def begin_transaction( async def commit( self, - request: spanner.CommitRequest = None, + request: Union[spanner.CommitRequest, dict] = None, *, session: str = None, transaction_id: bytes = None, mutations: Sequence[mutation.Mutation] = None, single_use_transaction: transaction.TransactionOptions = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> commit_response.CommitResponse: @@ -1094,7 +1096,7 @@ async def commit( things as they are now. Args: - request (:class:`google.cloud.spanner_v1.types.CommitRequest`): + request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): The request object. The request for [Commit][google.spanner.v1.Spanner.Commit]. session (:class:`str`): @@ -1203,11 +1205,11 @@ async def commit( async def rollback( self, - request: spanner.RollbackRequest = None, + request: Union[spanner.RollbackRequest, dict] = None, *, session: str = None, transaction_id: bytes = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1223,7 +1225,7 @@ async def rollback( ``ABORTED``. Args: - request (:class:`google.cloud.spanner_v1.types.RollbackRequest`): + request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): The request object. The request for [Rollback][google.spanner.v1.Spanner.Rollback]. 
session (:class:`str`): @@ -1295,9 +1297,9 @@ async def rollback( async def partition_query( self, - request: spanner.PartitionQueryRequest = None, + request: Union[spanner.PartitionQueryRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.PartitionResponse: @@ -1317,7 +1319,7 @@ async def partition_query( from the beginning. Args: - request (:class:`google.cloud.spanner_v1.types.PartitionQueryRequest`): + request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): The request object. The request for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1367,9 +1369,9 @@ async def partition_query( async def partition_read( self, - request: spanner.PartitionReadRequest = None, + request: Union[spanner.PartitionReadRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.PartitionResponse: @@ -1392,7 +1394,7 @@ async def partition_read( from the beginning. Args: - request (:class:`google.cloud.spanner_v1.types.PartitionReadRequest`): + request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): The request object. The request for [PartitionRead][google.spanner.v1.Spanner.PartitionRead] retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1440,6 +1442,12 @@ async def partition_read( # Done; return the response. 
return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py index 0acc775d60..2459208890 100644 --- a/google/cloud/spanner_v1/services/spanner/client.py +++ b/google/cloud/spanner_v1/services/spanner/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Iterable, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Iterable, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.spanner_v1.services.spanner import pagers from google.cloud.spanner_v1.types import commit_response from google.cloud.spanner_v1.types import mutation @@ -368,18 +370,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def create_session( self, - request: spanner.CreateSessionRequest = None, + request: Union[spanner.CreateSessionRequest, dict] = None, *, database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.Session: @@ -404,7 +403,7 @@ def create_session( periodically, e.g., ``"SELECT 1"``. 
Args: - request (google.cloud.spanner_v1.types.CreateSessionRequest): + request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): The request object. The request for [CreateSession][google.spanner.v1.Spanner.CreateSession]. database (str): @@ -463,11 +462,11 @@ def create_session( def batch_create_sessions( self, - request: spanner.BatchCreateSessionsRequest = None, + request: Union[spanner.BatchCreateSessionsRequest, dict] = None, *, database: str = None, session_count: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.BatchCreateSessionsResponse: @@ -477,7 +476,7 @@ def batch_create_sessions( practices on session cache management. Args: - request (google.cloud.spanner_v1.types.BatchCreateSessionsRequest): + request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): The request object. The request for [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. database (str): @@ -552,10 +551,10 @@ def batch_create_sessions( def get_session( self, - request: spanner.GetSessionRequest = None, + request: Union[spanner.GetSessionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.Session: @@ -564,7 +563,7 @@ def get_session( is still alive. Args: - request (google.cloud.spanner_v1.types.GetSessionRequest): + request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): The request object. The request for [GetSession][google.spanner.v1.Spanner.GetSession]. 
name (str): @@ -623,17 +622,17 @@ def get_session( def list_sessions( self, - request: spanner.ListSessionsRequest = None, + request: Union[spanner.ListSessionsRequest, dict] = None, *, database: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSessionsPager: r"""Lists all sessions in a given database. Args: - request (google.cloud.spanner_v1.types.ListSessionsRequest): + request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): The request object. The request for [ListSessions][google.spanner.v1.Spanner.ListSessions]. database (str): @@ -703,10 +702,10 @@ def list_sessions( def delete_session( self, - request: spanner.DeleteSessionRequest = None, + request: Union[spanner.DeleteSessionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -715,7 +714,7 @@ def delete_session( of any operations that are running with this session. Args: - request (google.cloud.spanner_v1.types.DeleteSessionRequest): + request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): The request object. The request for [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. name (str): @@ -769,9 +768,9 @@ def delete_session( def execute_sql( self, - request: spanner.ExecuteSqlRequest = None, + request: Union[spanner.ExecuteSqlRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> result_set.ResultSet: @@ -791,7 +790,7 @@ def execute_sql( instead. Args: - request (google.cloud.spanner_v1.types.ExecuteSqlRequest): + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): The request object. 
The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. @@ -833,9 +832,9 @@ def execute_sql( def execute_streaming_sql( self, - request: spanner.ExecuteSqlRequest = None, + request: Union[spanner.ExecuteSqlRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[result_set.PartialResultSet]: @@ -847,7 +846,7 @@ def execute_streaming_sql( column value can exceed 10 MiB. Args: - request (google.cloud.spanner_v1.types.ExecuteSqlRequest): + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): The request object. The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. @@ -892,9 +891,9 @@ def execute_streaming_sql( def execute_batch_dml( self, - request: spanner.ExecuteBatchDmlRequest = None, + request: Union[spanner.ExecuteBatchDmlRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.ExecuteBatchDmlResponse: @@ -914,7 +913,7 @@ def execute_batch_dml( statements are not executed. Args: - request (google.cloud.spanner_v1.types.ExecuteBatchDmlRequest): + request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): The request object. The request for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -992,9 +991,9 @@ def execute_batch_dml( def read( self, - request: spanner.ReadRequest = None, + request: Union[spanner.ReadRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> result_set.ResultSet: @@ -1015,7 +1014,7 @@ def read( instead. Args: - request (google.cloud.spanner_v1.types.ReadRequest): + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): The request object. The request for [Read][google.spanner.v1.Spanner.Read] and [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. @@ -1057,9 +1056,9 @@ def read( def streaming_read( self, - request: spanner.ReadRequest = None, + request: Union[spanner.ReadRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[result_set.PartialResultSet]: @@ -1071,7 +1070,7 @@ def streaming_read( exceed 10 MiB. Args: - request (google.cloud.spanner_v1.types.ReadRequest): + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): The request object. The request for [Read][google.spanner.v1.Spanner.Read] and [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. @@ -1116,11 +1115,11 @@ def streaming_read( def begin_transaction( self, - request: spanner.BeginTransactionRequest = None, + request: Union[spanner.BeginTransactionRequest, dict] = None, *, session: str = None, options: transaction.TransactionOptions = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> transaction.Transaction: @@ -1131,7 +1130,7 @@ def begin_transaction( transaction as a side-effect. 
Args: - request (google.cloud.spanner_v1.types.BeginTransactionRequest): + request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): The request object. The request for [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. session (str): @@ -1199,13 +1198,13 @@ def begin_transaction( def commit( self, - request: spanner.CommitRequest = None, + request: Union[spanner.CommitRequest, dict] = None, *, session: str = None, transaction_id: bytes = None, mutations: Sequence[mutation.Mutation] = None, single_use_transaction: transaction.TransactionOptions = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> commit_response.CommitResponse: @@ -1227,7 +1226,7 @@ def commit( things as they are now. Args: - request (google.cloud.spanner_v1.types.CommitRequest): + request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): The request object. The request for [Commit][google.spanner.v1.Spanner.Commit]. session (str): @@ -1327,11 +1326,11 @@ def commit( def rollback( self, - request: spanner.RollbackRequest = None, + request: Union[spanner.RollbackRequest, dict] = None, *, session: str = None, transaction_id: bytes = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1347,7 +1346,7 @@ def rollback( ``ABORTED``. Args: - request (google.cloud.spanner_v1.types.RollbackRequest): + request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): The request object. The request for [Rollback][google.spanner.v1.Spanner.Rollback]. 
session (str): @@ -1410,9 +1409,9 @@ def rollback( def partition_query( self, - request: spanner.PartitionQueryRequest = None, + request: Union[spanner.PartitionQueryRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.PartitionResponse: @@ -1432,7 +1431,7 @@ def partition_query( from the beginning. Args: - request (google.cloud.spanner_v1.types.PartitionQueryRequest): + request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): The request object. The request for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1474,9 +1473,9 @@ def partition_query( def partition_read( self, - request: spanner.PartitionReadRequest = None, + request: Union[spanner.PartitionReadRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.PartitionResponse: @@ -1499,7 +1498,7 @@ def partition_read( from the beginning. Args: - request (google.cloud.spanner_v1.types.PartitionReadRequest): + request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): The request object. The request for [PartitionRead][google.spanner.v1.Spanner.PartitionRead] retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1539,6 +1538,19 @@ def partition_read( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/spanner_v1/services/spanner/pagers.py b/google/cloud/spanner_v1/services/spanner/pagers.py index 4fea920f68..8b73b00fda 100644 --- a/google/cloud/spanner_v1/services/spanner/pagers.py +++ b/google/cloud/spanner_v1/services/spanner/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.spanner_v1.types import spanner @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[spanner.ListSessionsResponse]: + def pages(self) -> Iterator[spanner.ListSessionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[spanner.Session]: + def __iter__(self) -> Iterator[spanner.Session]: for page in self.pages: yield from page.sessions @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[spanner.ListSessionsResponse]: + async def pages(self) -> AsyncIterator[spanner.ListSessionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[spanner.Session]: + def __aiter__(self) -> AsyncIterator[spanner.Session]: async def async_generator(): async for page in self.pages: for response in page.sessions: diff --git a/google/cloud/spanner_v1/services/spanner/transports/base.py b/google/cloud/spanner_v1/services/spanner/transports/base.py index d230d79bc1..be78feacde 100644 
--- a/google/cloud/spanner_v1/services/spanner/transports/base.py +++ b/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -39,15 +38,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class SpannerTransport(abc.ABC): """Abstract transport class for Spanner.""" @@ -100,7 +90,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +112,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +123,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -351,6 +318,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def create_session( self, diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index 66e9227290..48d8d28e97 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -86,16 +86,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. @@ -749,5 +749,8 @@ def partition_read( ) return self._stubs["partition_read"] + def close(self): + self.grpc_channel.close() + __all__ = ("SpannerGrpcTransport",) diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py index ad78c2325e..d7a2373ca8 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -133,16 +132,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. 
It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. @@ -765,5 +764,8 @@ def partition_read( ) return self._stubs["partition_read"] + def close(self): + return self.grpc_channel.close() + __all__ = ("SpannerGrpcAsyncIOTransport",) diff --git a/google/cloud/spanner_v1/types/commit_response.py b/google/cloud/spanner_v1/types/commit_response.py index 1d20714bbd..1c9ccab0e8 100644 --- a/google/cloud/spanner_v1/types/commit_response.py +++ b/google/cloud/spanner_v1/types/commit_response.py @@ -23,6 +23,7 @@ class CommitResponse(proto.Message): r"""The response for [Commit][google.spanner.v1.Spanner.Commit]. + Attributes: commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): The Cloud Spanner timestamp at which the @@ -35,6 +36,7 @@ class CommitResponse(proto.Message): class CommitStats(proto.Message): r"""Additional statistics about a commit. + Attributes: mutation_count (int): The total number of mutations for the transaction. Knowing diff --git a/google/cloud/spanner_v1/types/keys.py b/google/cloud/spanner_v1/types/keys.py index 7c4f094aa2..139c8887ed 100644 --- a/google/cloud/spanner_v1/types/keys.py +++ b/google/cloud/spanner_v1/types/keys.py @@ -135,22 +135,33 @@ class KeyRange(proto.Message): Note that 100 is passed as the start, and 1 is passed as the end, because ``Key`` is a descending column in the schema. 
+ This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: start_closed (google.protobuf.struct_pb2.ListValue): If the start is closed, then the range includes all rows whose first ``len(start_closed)`` key columns exactly match ``start_closed``. + This field is a member of `oneof`_ ``start_key_type``. start_open (google.protobuf.struct_pb2.ListValue): If the start is open, then the range excludes rows whose first ``len(start_open)`` key columns exactly match ``start_open``. + This field is a member of `oneof`_ ``start_key_type``. end_closed (google.protobuf.struct_pb2.ListValue): If the end is closed, then the range includes all rows whose first ``len(end_closed)`` key columns exactly match ``end_closed``. + This field is a member of `oneof`_ ``end_key_type``. end_open (google.protobuf.struct_pb2.ListValue): If the end is open, then the range excludes rows whose first ``len(end_open)`` key columns exactly match ``end_open``. + This field is a member of `oneof`_ ``end_key_type``. """ start_closed = proto.Field( diff --git a/google/cloud/spanner_v1/types/mutation.py b/google/cloud/spanner_v1/types/mutation.py index 632f77eaaf..6add075409 100644 --- a/google/cloud/spanner_v1/types/mutation.py +++ b/google/cloud/spanner_v1/types/mutation.py @@ -27,15 +27,24 @@ class Mutation(proto.Message): applied to a Cloud Spanner database by sending them in a [Commit][google.spanner.v1.Spanner.Commit] call. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: insert (google.cloud.spanner_v1.types.Mutation.Write): Insert new rows in a table. If any of the rows already exist, the write or transaction fails with error ``ALREADY_EXISTS``. + This field is a member of `oneof`_ ``operation``. update (google.cloud.spanner_v1.types.Mutation.Write): Update existing rows in a table. If any of the rows does not already exist, the transaction fails with error ``NOT_FOUND``. + This field is a member of `oneof`_ ``operation``. insert_or_update (google.cloud.spanner_v1.types.Mutation.Write): Like [insert][google.spanner.v1.Mutation.insert], except that if the row already exists, then its column values are @@ -49,6 +58,7 @@ class Mutation(proto.Message): ``NOT NULL`` columns in the table must be given a value. This holds true even when the row already exists and will therefore actually be updated. + This field is a member of `oneof`_ ``operation``. replace (google.cloud.spanner_v1.types.Mutation.Write): Like [insert][google.spanner.v1.Mutation.insert], except that if the row already exists, it is deleted, and the @@ -61,9 +71,11 @@ class Mutation(proto.Message): the ``ON DELETE CASCADE`` annotation, then replacing a parent row also deletes the child rows. Otherwise, you must delete the child rows before you replace the parent row. + This field is a member of `oneof`_ ``operation``. delete (google.cloud.spanner_v1.types.Mutation.Delete): Delete rows from a table. Succeeds whether or not the named rows were present. + This field is a member of `oneof`_ ``operation``. """ class Write(proto.Message): @@ -107,6 +119,7 @@ class Write(proto.Message): class Delete(proto.Message): r"""Arguments to [delete][google.spanner.v1.Mutation.delete] operations. + Attributes: table (str): Required. 
The table whose rows will be diff --git a/google/cloud/spanner_v1/types/result_set.py b/google/cloud/spanner_v1/types/result_set.py index 2b2cad1451..ad9f12b583 100644 --- a/google/cloud/spanner_v1/types/result_set.py +++ b/google/cloud/spanner_v1/types/result_set.py @@ -216,6 +216,13 @@ class ResultSetStats(proto.Message): [ResultSet][google.spanner.v1.ResultSet] or [PartialResultSet][google.spanner.v1.PartialResultSet]. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: query_plan (google.cloud.spanner_v1.types.QueryPlan): [QueryPlan][google.spanner.v1.QueryPlan] for the query @@ -235,10 +242,12 @@ class ResultSetStats(proto.Message): row_count_exact (int): Standard DML returns an exact count of rows that were modified. + This field is a member of `oneof`_ ``row_count``. row_count_lower_bound (int): Partitioned DML does not offer exactly-once semantics, so it returns a lower bound of the rows modified. + This field is a member of `oneof`_ ``row_count``. """ query_plan = proto.Field(proto.MESSAGE, number=1, message=gs_query_plan.QueryPlan,) diff --git a/google/cloud/spanner_v1/types/spanner.py b/google/cloud/spanner_v1/types/spanner.py index bbfd28af92..e80fff9388 100644 --- a/google/cloud/spanner_v1/types/spanner.py +++ b/google/cloud/spanner_v1/types/spanner.py @@ -109,6 +109,7 @@ class BatchCreateSessionsResponse(proto.Message): class Session(proto.Message): r"""A session in the Cloud Spanner API. + Attributes: name (str): Output only. The name of the session. This is @@ -146,6 +147,7 @@ class Session(proto.Message): class GetSessionRequest(proto.Message): r"""The request for [GetSession][google.spanner.v1.Spanner.GetSession]. + Attributes: name (str): Required. 
The name of the session to @@ -227,6 +229,7 @@ class DeleteSessionRequest(proto.Message): class RequestOptions(proto.Message): r"""Common request options for various APIs. + Attributes: priority (google.cloud.spanner_v1.types.RequestOptions.Priority): Priority for the request. @@ -389,6 +392,7 @@ class QueryMode(proto.Enum): class QueryOptions(proto.Message): r"""Query optimizer configuration. + Attributes: optimizer_version (str): An option to control the selection of optimizer version. @@ -507,6 +511,7 @@ class ExecuteBatchDmlRequest(proto.Message): class Statement(proto.Message): r"""A single DML statement. + Attributes: sql (str): Required. The DML string. @@ -930,12 +935,21 @@ class BeginTransactionRequest(proto.Message): class CommitRequest(proto.Message): r"""The request for [Commit][google.spanner.v1.Spanner.Commit]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: session (str): Required. The session in which the transaction to be committed is running. transaction_id (bytes): Commit a previously-started transaction. + This field is a member of `oneof`_ ``transaction``. single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): Execute mutations in a temporary transaction. Note that unlike commit of a previously-started transaction, commit @@ -946,6 +960,7 @@ class CommitRequest(proto.Message): are executed more than once. If this is undesirable, use [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] and [Commit][google.spanner.v1.Spanner.Commit] instead. + This field is a member of `oneof`_ ``transaction``. mutations (Sequence[google.cloud.spanner_v1.types.Mutation]): The mutations to be executed when this transaction commits. 
All mutations are applied @@ -975,6 +990,7 @@ class CommitRequest(proto.Message): class RollbackRequest(proto.Message): r"""The request for [Rollback][google.spanner.v1.Spanner.Rollback]. + Attributes: session (str): Required. The session in which the diff --git a/google/cloud/spanner_v1/types/transaction.py b/google/cloud/spanner_v1/types/transaction.py index 42c71f65d1..c5a10a0b8d 100644 --- a/google/cloud/spanner_v1/types/transaction.py +++ b/google/cloud/spanner_v1/types/transaction.py @@ -303,6 +303,13 @@ class TransactionOptions(proto.Message): database-wide, operations that are idempotent, such as deleting old rows from a very large table. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: read_write (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite): Transaction may write. @@ -310,6 +317,7 @@ class TransactionOptions(proto.Message): Authorization to begin a read-write transaction requires ``spanner.databases.beginOrRollbackReadWriteTransaction`` permission on the ``session`` resource. + This field is a member of `oneof`_ ``mode``. partitioned_dml (google.cloud.spanner_v1.types.TransactionOptions.PartitionedDml): Partitioned DML transaction. @@ -317,29 +325,41 @@ class TransactionOptions(proto.Message): requires ``spanner.databases.beginPartitionedDmlTransaction`` permission on the ``session`` resource. + This field is a member of `oneof`_ ``mode``. read_only (google.cloud.spanner_v1.types.TransactionOptions.ReadOnly): Transaction will not write. Authorization to begin a read-only transaction requires ``spanner.databases.beginReadOnlyTransaction`` permission on the ``session`` resource. + This field is a member of `oneof`_ ``mode``. 
""" class ReadWrite(proto.Message): r"""Message type to initiate a read-write transaction. Currently this transaction type has no options. - """ + + """ class PartitionedDml(proto.Message): - r"""Message type to initiate a Partitioned DML transaction. """ + r"""Message type to initiate a Partitioned DML transaction. + """ class ReadOnly(proto.Message): r"""Message type to initiate a read-only transaction. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: strong (bool): Read at a timestamp where all previously committed transactions are visible. + This field is a member of `oneof`_ ``timestamp_bound``. min_read_timestamp (google.protobuf.timestamp_pb2.Timestamp): Executes all reads at a timestamp >= ``min_read_timestamp``. @@ -353,6 +373,7 @@ class ReadOnly(proto.Message): A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + This field is a member of `oneof`_ ``timestamp_bound``. max_staleness (google.protobuf.duration_pb2.Duration): Read data at a timestamp >= ``NOW - max_staleness`` seconds. Guarantees that all writes that have committed more than the @@ -367,6 +388,7 @@ class ReadOnly(proto.Message): Note that this option can only be used in single-use transactions. + This field is a member of `oneof`_ ``timestamp_bound``. read_timestamp (google.protobuf.timestamp_pb2.Timestamp): Executes all reads at the given timestamp. Unlike other modes, reads at a specific timestamp are repeatable; the @@ -380,6 +402,7 @@ class ReadOnly(proto.Message): A timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + This field is a member of `oneof`_ ``timestamp_bound``. 
exact_staleness (google.protobuf.duration_pb2.Duration): Executes all reads at a timestamp that is ``exact_staleness`` old. The timestamp is chosen soon after @@ -394,6 +417,7 @@ class ReadOnly(proto.Message): Useful for reading at nearby replicas without the distributed timestamp negotiation overhead of ``max_staleness``. + This field is a member of `oneof`_ ``timestamp_bound``. return_read_timestamp (bool): If true, the Cloud Spanner-selected read timestamp is included in the [Transaction][google.spanner.v1.Transaction] @@ -470,21 +494,31 @@ class TransactionSelector(proto.Message): See [TransactionOptions][google.spanner.v1.TransactionOptions] for more information about transactions. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: single_use (google.cloud.spanner_v1.types.TransactionOptions): Execute the read or SQL query in a temporary transaction. This is the most efficient way to execute a transaction that consists of a single SQL query. + This field is a member of `oneof`_ ``selector``. id (bytes): Execute the read or SQL query in a previously-started transaction. + This field is a member of `oneof`_ ``selector``. begin (google.cloud.spanner_v1.types.TransactionOptions): Begin a new transaction and execute this read or SQL query in it. The transaction ID of the new transaction is returned in [ResultSetMetadata.transaction][google.spanner.v1.ResultSetMetadata.transaction], which is a [Transaction][google.spanner.v1.Transaction]. + This field is a member of `oneof`_ ``selector``. 
""" single_use = proto.Field( diff --git a/google/cloud/spanner_v1/types/type.py b/google/cloud/spanner_v1/types/type.py index 42754d974c..2c00626c7a 100644 --- a/google/cloud/spanner_v1/types/type.py +++ b/google/cloud/spanner_v1/types/type.py @@ -86,6 +86,7 @@ class StructType(proto.Message): class Field(proto.Message): r"""Message representing a single field of a struct. + Attributes: name (str): The name of the field. For reads, this is the column name. diff --git a/owl-bot-staging/spanner/v1/.coveragerc b/owl-bot-staging/spanner/v1/.coveragerc deleted file mode 100644 index 98857dd0c9..0000000000 --- a/owl-bot-staging/spanner/v1/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/spanner/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/spanner/v1/MANIFEST.in b/owl-bot-staging/spanner/v1/MANIFEST.in deleted file mode 100644 index 2bde59af4d..0000000000 --- a/owl-bot-staging/spanner/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/spanner *.py -recursive-include google/cloud/spanner_v1 *.py diff --git a/owl-bot-staging/spanner/v1/README.rst b/owl-bot-staging/spanner/v1/README.rst deleted file mode 100644 index d6abf98111..0000000000 --- a/owl-bot-staging/spanner/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Spanner API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. 
`Enable billing for your project.`_ -3. Enable the Google Cloud Spanner API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/spanner/v1/docs/conf.py b/owl-bot-staging/spanner/v1/docs/conf.py deleted file mode 100644 index c87b4a6a95..0000000000 --- a/owl-bot-staging/spanner/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# -# google-cloud-spanner documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. 
-project = u"google-cloud-spanner" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
-todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. 
-htmlhelp_basename = "google-cloud-spanner-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-spanner.tex", - u"google-cloud-spanner Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). 
-man_pages = [ - ( - master_doc, - "google-cloud-spanner", - u"Google Cloud Spanner Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-spanner", - u"google-cloud-spanner Documentation", - author, - "google-cloud-spanner", - "GAPIC library for Google Cloud Spanner API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner/v1/docs/index.rst b/owl-bot-staging/spanner/v1/docs/index.rst deleted file mode 100644 index ee873ac9c1..0000000000 --- a/owl-bot-staging/spanner/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - spanner_v1/services - spanner_v1/types diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/services.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/services.rst deleted file mode 100644 index 3bbbb55f79..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner v1 API -======================================== -.. 
toctree:: - :maxdepth: 2 - - spanner diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst deleted file mode 100644 index b51f4447e4..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst +++ /dev/null @@ -1,10 +0,0 @@ -Spanner -------------------------- - -.. automodule:: google.cloud.spanner_v1.services.spanner - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_v1.services.spanner.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/types.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/types.rst deleted file mode 100644 index 8678aba188..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Spanner v1 API -===================================== - -.. automodule:: google.cloud.spanner_v1.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py deleted file mode 100644 index 6a24892b8b..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.cloud.spanner_v1.services.spanner.client import SpannerClient -from google.cloud.spanner_v1.services.spanner.async_client import SpannerAsyncClient - -from google.cloud.spanner_v1.types.commit_response import CommitResponse -from google.cloud.spanner_v1.types.keys import KeyRange -from google.cloud.spanner_v1.types.keys import KeySet -from google.cloud.spanner_v1.types.mutation import Mutation -from google.cloud.spanner_v1.types.query_plan import PlanNode -from google.cloud.spanner_v1.types.query_plan import QueryPlan -from google.cloud.spanner_v1.types.result_set import PartialResultSet -from google.cloud.spanner_v1.types.result_set import ResultSet -from google.cloud.spanner_v1.types.result_set import ResultSetMetadata -from google.cloud.spanner_v1.types.result_set import ResultSetStats -from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsRequest -from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsResponse -from google.cloud.spanner_v1.types.spanner import BeginTransactionRequest -from google.cloud.spanner_v1.types.spanner import CommitRequest -from google.cloud.spanner_v1.types.spanner import CreateSessionRequest -from google.cloud.spanner_v1.types.spanner import DeleteSessionRequest -from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlRequest -from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlResponse -from google.cloud.spanner_v1.types.spanner import ExecuteSqlRequest -from google.cloud.spanner_v1.types.spanner import GetSessionRequest -from google.cloud.spanner_v1.types.spanner import ListSessionsRequest -from google.cloud.spanner_v1.types.spanner import ListSessionsResponse -from google.cloud.spanner_v1.types.spanner import Partition -from google.cloud.spanner_v1.types.spanner import PartitionOptions -from google.cloud.spanner_v1.types.spanner import PartitionQueryRequest -from google.cloud.spanner_v1.types.spanner import PartitionReadRequest -from 
google.cloud.spanner_v1.types.spanner import PartitionResponse -from google.cloud.spanner_v1.types.spanner import ReadRequest -from google.cloud.spanner_v1.types.spanner import RequestOptions -from google.cloud.spanner_v1.types.spanner import RollbackRequest -from google.cloud.spanner_v1.types.spanner import Session -from google.cloud.spanner_v1.types.transaction import Transaction -from google.cloud.spanner_v1.types.transaction import TransactionOptions -from google.cloud.spanner_v1.types.transaction import TransactionSelector -from google.cloud.spanner_v1.types.type import StructType -from google.cloud.spanner_v1.types.type import Type -from google.cloud.spanner_v1.types.type import TypeCode - -__all__ = ('SpannerClient', - 'SpannerAsyncClient', - 'CommitResponse', - 'KeyRange', - 'KeySet', - 'Mutation', - 'PlanNode', - 'QueryPlan', - 'PartialResultSet', - 'ResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 'BeginTransactionRequest', - 'CommitRequest', - 'CreateSessionRequest', - 'DeleteSessionRequest', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'ExecuteSqlRequest', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'Partition', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'PartitionResponse', - 'ReadRequest', - 'RequestOptions', - 'RollbackRequest', - 'Session', - 'Transaction', - 'TransactionOptions', - 'TransactionSelector', - 'StructType', - 'Type', - 'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed deleted file mode 100644 index 0989eccd04..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner package uses inline types. 
diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py deleted file mode 100644 index 16e8c9c6f1..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.spanner import SpannerClient -from .services.spanner import SpannerAsyncClient - -from .types.commit_response import CommitResponse -from .types.keys import KeyRange -from .types.keys import KeySet -from .types.mutation import Mutation -from .types.query_plan import PlanNode -from .types.query_plan import QueryPlan -from .types.result_set import PartialResultSet -from .types.result_set import ResultSet -from .types.result_set import ResultSetMetadata -from .types.result_set import ResultSetStats -from .types.spanner import BatchCreateSessionsRequest -from .types.spanner import BatchCreateSessionsResponse -from .types.spanner import BeginTransactionRequest -from .types.spanner import CommitRequest -from .types.spanner import CreateSessionRequest -from .types.spanner import DeleteSessionRequest -from .types.spanner import ExecuteBatchDmlRequest -from .types.spanner import ExecuteBatchDmlResponse -from .types.spanner import ExecuteSqlRequest -from .types.spanner import GetSessionRequest -from .types.spanner import ListSessionsRequest -from .types.spanner import 
ListSessionsResponse -from .types.spanner import Partition -from .types.spanner import PartitionOptions -from .types.spanner import PartitionQueryRequest -from .types.spanner import PartitionReadRequest -from .types.spanner import PartitionResponse -from .types.spanner import ReadRequest -from .types.spanner import RequestOptions -from .types.spanner import RollbackRequest -from .types.spanner import Session -from .types.transaction import Transaction -from .types.transaction import TransactionOptions -from .types.transaction import TransactionSelector -from .types.type import StructType -from .types.type import Type -from .types.type import TypeCode - -__all__ = ( - 'SpannerAsyncClient', -'BatchCreateSessionsRequest', -'BatchCreateSessionsResponse', -'BeginTransactionRequest', -'CommitRequest', -'CommitResponse', -'CreateSessionRequest', -'DeleteSessionRequest', -'ExecuteBatchDmlRequest', -'ExecuteBatchDmlResponse', -'ExecuteSqlRequest', -'GetSessionRequest', -'KeyRange', -'KeySet', -'ListSessionsRequest', -'ListSessionsResponse', -'Mutation', -'PartialResultSet', -'Partition', -'PartitionOptions', -'PartitionQueryRequest', -'PartitionReadRequest', -'PartitionResponse', -'PlanNode', -'QueryPlan', -'ReadRequest', -'RequestOptions', -'ResultSet', -'ResultSetMetadata', -'ResultSetStats', -'RollbackRequest', -'Session', -'SpannerClient', -'StructType', -'Transaction', -'TransactionOptions', -'TransactionSelector', -'Type', -'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json deleted file mode 100644 index a6b16725c3..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json +++ /dev/null @@ -1,173 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.spanner_v1", - "protoPackage": "google.spanner.v1", - 
"schema": "1.0", - "services": { - "Spanner": { - "clients": { - "grpc": { - "libraryClient": "SpannerClient", - "rpcs": { - "BatchCreateSessions": { - "methods": [ - "batch_create_sessions" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateSession": { - "methods": [ - "create_session" - ] - }, - "DeleteSession": { - "methods": [ - "delete_session" - ] - }, - "ExecuteBatchDml": { - "methods": [ - "execute_batch_dml" - ] - }, - "ExecuteSql": { - "methods": [ - "execute_sql" - ] - }, - "ExecuteStreamingSql": { - "methods": [ - "execute_streaming_sql" - ] - }, - "GetSession": { - "methods": [ - "get_session" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "PartitionRead": { - "methods": [ - "partition_read" - ] - }, - "Read": { - "methods": [ - "read" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "StreamingRead": { - "methods": [ - "streaming_read" - ] - } - } - }, - "grpc-async": { - "libraryClient": "SpannerAsyncClient", - "rpcs": { - "BatchCreateSessions": { - "methods": [ - "batch_create_sessions" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateSession": { - "methods": [ - "create_session" - ] - }, - "DeleteSession": { - "methods": [ - "delete_session" - ] - }, - "ExecuteBatchDml": { - "methods": [ - "execute_batch_dml" - ] - }, - "ExecuteSql": { - "methods": [ - "execute_sql" - ] - }, - "ExecuteStreamingSql": { - "methods": [ - "execute_streaming_sql" - ] - }, - "GetSession": { - "methods": [ - "get_session" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "PartitionRead": { - "methods": [ - "partition_read" - ] - }, - "Read": { - "methods": [ - "read" - ] - }, - "Rollback": { - "methods": [ - 
"rollback" - ] - }, - "StreamingRead": { - "methods": [ - "streaming_read" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed deleted file mode 100644 index 0989eccd04..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py deleted file mode 100644 index 4de65971c2..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py deleted file mode 100644 index a0fb0c707d..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import SpannerClient -from .async_client import SpannerAsyncClient - -__all__ = ( - 'SpannerClient', - 'SpannerAsyncClient', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py deleted file mode 100644 index 2f65c8b008..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py +++ /dev/null @@ -1,1486 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -OptionalRetry = Union[retries.Retry, object] - -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport -from .client import SpannerClient - - -class SpannerAsyncClient: - """Cloud Spanner API - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. 
- """ - - _client: SpannerClient - - DEFAULT_ENDPOINT = SpannerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = SpannerClient.DEFAULT_MTLS_ENDPOINT - - database_path = staticmethod(SpannerClient.database_path) - parse_database_path = staticmethod(SpannerClient.parse_database_path) - session_path = staticmethod(SpannerClient.session_path) - parse_session_path = staticmethod(SpannerClient.parse_session_path) - common_billing_account_path = staticmethod(SpannerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(SpannerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(SpannerClient.common_folder_path) - parse_common_folder_path = staticmethod(SpannerClient.parse_common_folder_path) - common_organization_path = staticmethod(SpannerClient.common_organization_path) - parse_common_organization_path = staticmethod(SpannerClient.parse_common_organization_path) - common_project_path = staticmethod(SpannerClient.common_project_path) - parse_common_project_path = staticmethod(SpannerClient.parse_common_project_path) - common_location_path = staticmethod(SpannerClient.common_location_path) - parse_common_location_path = staticmethod(SpannerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerAsyncClient: The constructed client. - """ - return SpannerClient.from_service_account_info.__func__(SpannerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. 
- - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerAsyncClient: The constructed client. - """ - return SpannerClient.from_service_account_file.__func__(SpannerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> SpannerTransport: - """Returns the transport used by the client instance. - - Returns: - SpannerTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(SpannerClient).get_transport_class, type(SpannerClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, SpannerTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the spanner client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.SpannerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = SpannerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_session(self, - request: Union[spanner.CreateSessionRequest, dict] = None, - *, - database: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.Session: - r"""Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it is a good - idea to delete idle and unneeded sessions. 
Aside from explicit - deletes, Cloud Spanner may delete sessions for which no - operations are sent for more than an hour. If a session is - deleted, requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, e.g., ``"SELECT 1"``. - - Args: - request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): - The request object. The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - database (:class:`str`): - Required. The database in which the - new session is created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner.CreateSessionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_session, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def batch_create_sessions(self, - request: Union[spanner.BatchCreateSessionsRequest, dict] = None, - *, - database: str = None, - session_count: int = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.BatchCreateSessionsResponse: - r"""Creates multiple new sessions. - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - Args: - request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): - The request object. The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - database (:class:`str`): - Required. The database in which the - new sessions are created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - session_count (:class:`int`): - Required. The number of sessions to be created in this - batch call. The API may return fewer than the requested - number of sessions. 
If a specific number of sessions are - desired, the client can make additional calls to - BatchCreateSessions (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - - This corresponds to the ``session_count`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.BatchCreateSessionsResponse: - The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, session_count]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner.BatchCreateSessionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if session_count is not None: - request.session_count = session_count - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.batch_create_sessions, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_session(self, - request: Union[spanner.GetSessionRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.Session: - r"""Gets a session. Returns ``NOT_FOUND`` if the session does not - exist. This is mainly useful for determining whether a session - is still alive. - - Args: - request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): - The request object. The request for - [GetSession][google.spanner.v1.Spanner.GetSession]. - name (:class:`str`): - Required. The name of the session to - retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner.GetSessionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_session, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_sessions(self, - request: Union[spanner.ListSessionsRequest, dict] = None, - *, - database: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSessionsAsyncPager: - r"""Lists all sessions in a given database. - - Args: - request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): - The request object. The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - database (:class:`str`): - Required. The database in which to - list sessions. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager: - The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. 
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner.ListSessionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_sessions, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSessionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_session(self, - request: Union[spanner.DeleteSessionRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Ends a session, releasing server resources associated - with it. This will asynchronously trigger cancellation - of any operations that are running with this session. - - Args: - request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): - The request object. The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - name (:class:`str`): - Required. The name of the session to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner.DeleteSessionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_session, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def execute_sql(self, - request: Union[spanner.ExecuteSqlRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> result_set.ResultSet: - r"""Executes an SQL statement, returning all results in a single - reply. This method cannot be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.execute_sql, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def execute_streaming_sql(self, - request: Union[spanner.ExecuteSqlRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: - r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): - The request object. 
The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.execute_streaming_sql, - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def execute_batch_dml(self, - request: Union[spanner.ExecuteBatchDmlRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.ExecuteBatchDmlResponse: - r"""Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. 
The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): - The request object. The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: - The response for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. Contains a list - of [ResultSet][google.spanner.v1.ResultSet] messages, - one for each DML statement that has successfully - executed, in the same order as the statements in the - request. If a statement fails, the status in the - response body identifies the cause of the failure. - - To check for DML statements that failed, use the - following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value OK - indicates that all statements were executed - successfully. - 2. If the status was not OK, check the number of - result sets in the response. If the response - contains N - [ResultSet][google.spanner.v1.ResultSet] messages, - then statement N+1 in the request failed. - - Example 1: - - - Request: 5 DML statements, all executed - successfully. - - Response: 5 - [ResultSet][google.spanner.v1.ResultSet] messages, - with the status OK. - - Example 2: - - - Request: 5 DML statements. The third statement has - a syntax error. 
- - Response: 2 - [ResultSet][google.spanner.v1.ResultSet] messages, - and a syntax error (INVALID_ARGUMENT) status. The - number of [ResultSet][google.spanner.v1.ResultSet] - messages indicates that the third statement - failed, and the fourth and fifth statements were - not executed. - - """ - # Create or coerce a protobuf request object. - request = spanner.ExecuteBatchDmlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.execute_batch_dml, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def read(self, - request: Union[spanner.ReadRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> result_set.ResultSet: - r"""Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method - cannot be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. 
- - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - Args: - request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): - The request object. The request for - [Read][google.spanner.v1.Spanner.Read] and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.read, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def streaming_read(self, - request: Union[spanner.ReadRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: - r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - Args: - request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): - The request object. The request for - [Read][google.spanner.v1.Spanner.Read] and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.streaming_read, - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def begin_transaction(self, - request: Union[spanner.BeginTransactionRequest, dict] = None, - *, - session: str = None, - options: transaction.TransactionOptions = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> transaction.Transaction: - r"""Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - Args: - request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): - The request object. The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - session (:class:`str`): - Required. The session in which the - transaction runs. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - options (:class:`google.cloud.spanner_v1.types.TransactionOptions`): - Required. Options for the new - transaction. - - This corresponds to the ``options`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.Transaction: - A transaction. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([session, options]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner.BeginTransactionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if options is not None: - request.options = options - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.begin_transaction, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def commit(self, - request: Union[spanner.CommitRequest, dict] = None, - *, - session: str = None, - transaction_id: bytes = None, - mutations: Sequence[mutation.Mutation] = None, - single_use_transaction: transaction.TransactionOptions = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> commit_response.CommitResponse: - r"""Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. 
However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - re-attempt the transaction from the beginning, re-using the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - Args: - request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): - The request object. The request for - [Commit][google.spanner.v1.Spanner.Commit]. - session (:class:`str`): - Required. The session in which the - transaction to be committed is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (:class:`bytes`): - Commit a previously-started - transaction. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutations (:class:`Sequence[google.cloud.spanner_v1.types.Mutation]`): - The mutations to be executed when - this transaction commits. All mutations - are applied atomically, in the order - they appear in this list. - - This corresponds to the ``mutations`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - single_use_transaction (:class:`google.cloud.spanner_v1.types.TransactionOptions`): - Execute mutations in a temporary transaction. Note that - unlike commit of a previously-started transaction, - commit with a temporary transaction is non-idempotent. - That is, if the ``CommitRequest`` is sent to Cloud - Spanner more than once (for instance, due to retries in - the application, or in the transport library), it is - possible that the mutations are executed more than once. 
- If this is undesirable, use - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] - and [Commit][google.spanner.v1.Spanner.Commit] instead. - - This corresponds to the ``single_use_transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.CommitResponse: - The response for - [Commit][google.spanner.v1.Spanner.Commit]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([session, transaction_id, mutations, single_use_transaction]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner.CommitRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - if single_use_transaction is not None: - request.single_use_transaction = single_use_transaction - if mutations: - request.mutations.extend(mutations) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.commit, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rollback(self, - request: Union[spanner.RollbackRequest, dict] = None, - *, - session: str = None, - transaction_id: bytes = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Rolls back a transaction, releasing any locks it holds. It is a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction is not found. ``Rollback`` never returns - ``ABORTED``. - - Args: - request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): - The request object. The request for - [Rollback][google.spanner.v1.Spanner.Rollback]. - session (:class:`str`): - Required. The session in which the - transaction to roll back is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (:class:`bytes`): - Required. The transaction to roll - back. 
- - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([session, transaction_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner.RollbackRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rollback, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def partition_query(self, - request: Union[spanner.PartitionQueryRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - query operation in parallel. Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - PartitionQueryRequest used to create the partition tokens and - the ExecuteSqlRequests that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it is not possible - to resume the query, and the whole operation must be restarted - from the beginning. - - Args: - request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): - The request object. The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - request = spanner.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.partition_query, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def partition_read(self, - request: Union[spanner.PartitionReadRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - PartitionReadRequest used to create the partition tokens and the - ReadRequests that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual StreamingRead - call issued with a partition_token. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it is not possible - to resume the read, and the whole operation must be restarted - from the beginning. - - Args: - request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): - The request object. 
The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - request = spanner.PartitionReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.partition_read, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-spanner", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "SpannerAsyncClient", -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py deleted file mode 100644 index ca138e5b6c..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py +++ /dev/null @@ -1,1621 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Dict, Optional, Iterable, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -OptionalRetry = Union[retries.Retry, object] - -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import SpannerGrpcTransport -from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport - - -class SpannerClientMeta(type): - """Metaclass for the Spanner client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] - _transport_registry["grpc"] = SpannerGrpcTransport - _transport_registry["grpc_asyncio"] = SpannerGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[SpannerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class SpannerClient(metaclass=SpannerClientMeta): - """Cloud Spanner API - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "spanner.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> SpannerTransport: - """Returns the transport used by the client instance. - - Returns: - SpannerTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def database_path(project: str,instance: str,database: str,) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - - @staticmethod - def parse_database_path(path: str) -> Dict[str,str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def session_path(project: str,instance: str,database: str,session: str,) -> str: - """Returns a fully-qualified session string.""" - return "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) - - @staticmethod - def parse_session_path(path: str) -> Dict[str,str]: - """Parses a session path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)/sessions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - 
@staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, SpannerTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the spanner client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, SpannerTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, SpannerTransport): - # transport is a SpannerTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - def create_session(self, - request: Union[spanner.CreateSessionRequest, dict] = None, - *, - database: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.Session: - r"""Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it is a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner may delete sessions for which no - operations are sent for more than an hour. If a session is - deleted, requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, e.g., ``"SELECT 1"``. - - Args: - request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): - The request object. The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - database (str): - Required. The database in which the - new session is created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner.CreateSessionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.CreateSessionRequest): - request = spanner.CreateSessionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def batch_create_sessions(self, - request: Union[spanner.BatchCreateSessionsRequest, dict] = None, - *, - database: str = None, - session_count: int = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.BatchCreateSessionsResponse: - r"""Creates multiple new sessions. - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - Args: - request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): - The request object. The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - database (str): - Required. The database in which the - new sessions are created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - session_count (int): - Required. The number of sessions to be created in this - batch call. The API may return fewer than the requested - number of sessions. If a specific number of sessions are - desired, the client can make additional calls to - BatchCreateSessions (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - - This corresponds to the ``session_count`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.BatchCreateSessionsResponse: - The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, session_count]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner.BatchCreateSessionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.BatchCreateSessionsRequest): - request = spanner.BatchCreateSessionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if session_count is not None: - request.session_count = session_count - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_create_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_session(self, - request: Union[spanner.GetSessionRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.Session: - r"""Gets a session. Returns ``NOT_FOUND`` if the session does not - exist. This is mainly useful for determining whether a session - is still alive. - - Args: - request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): - The request object. 
The request for - [GetSession][google.spanner.v1.Spanner.GetSession]. - name (str): - Required. The name of the session to - retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner.GetSessionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.GetSessionRequest): - request = spanner.GetSessionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_sessions(self, - request: Union[spanner.ListSessionsRequest, dict] = None, - *, - database: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSessionsPager: - r"""Lists all sessions in a given database. - - Args: - request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): - The request object. The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - database (str): - Required. The database in which to - list sessions. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager: - The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner.ListSessionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, spanner.ListSessionsRequest): - request = spanner.ListSessionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSessionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_session(self, - request: Union[spanner.DeleteSessionRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Ends a session, releasing server resources associated - with it. This will asynchronously trigger cancellation - of any operations that are running with this session. - - Args: - request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): - The request object. The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - name (str): - Required. The name of the session to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner.DeleteSessionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.DeleteSessionRequest): - request = spanner.DeleteSessionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def execute_sql(self, - request: Union[spanner.ExecuteSqlRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> result_set.ResultSet: - r"""Executes an SQL statement, returning all results in a single - reply. This method cannot be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. 
If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a spanner.ExecuteSqlRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.ExecuteSqlRequest): - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.execute_sql] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def execute_streaming_sql(self, - request: Union[spanner.ExecuteSqlRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[result_set.PartialResultSet]: - r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a spanner.ExecuteSqlRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.ExecuteSqlRequest): - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.execute_streaming_sql] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def execute_batch_dml(self, - request: Union[spanner.ExecuteBatchDmlRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.ExecuteBatchDmlResponse: - r"""Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): - The request object. The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: - The response for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. 
Contains a list - of [ResultSet][google.spanner.v1.ResultSet] messages, - one for each DML statement that has successfully - executed, in the same order as the statements in the - request. If a statement fails, the status in the - response body identifies the cause of the failure. - - To check for DML statements that failed, use the - following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value OK - indicates that all statements were executed - successfully. - 2. If the status was not OK, check the number of - result sets in the response. If the response - contains N - [ResultSet][google.spanner.v1.ResultSet] messages, - then statement N+1 in the request failed. - - Example 1: - - - Request: 5 DML statements, all executed - successfully. - - Response: 5 - [ResultSet][google.spanner.v1.ResultSet] messages, - with the status OK. - - Example 2: - - - Request: 5 DML statements. The third statement has - a syntax error. - - Response: 2 - [ResultSet][google.spanner.v1.ResultSet] messages, - and a syntax error (INVALID_ARGUMENT) status. The - number of [ResultSet][google.spanner.v1.ResultSet] - messages indicates that the third statement - failed, and the fourth and fifth statements were - not executed. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a spanner.ExecuteBatchDmlRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.ExecuteBatchDmlRequest): - request = spanner.ExecuteBatchDmlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.execute_batch_dml] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def read(self, - request: Union[spanner.ReadRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> result_set.ResultSet: - r"""Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method - cannot be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - Args: - request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): - The request object. The request for - [Read][google.spanner.v1.Spanner.Read] and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a spanner.ReadRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def streaming_read(self, - request: Union[spanner.ReadRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[result_set.PartialResultSet]: - r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - Args: - request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): - The request object. The request for - [Read][google.spanner.v1.Spanner.Read] and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. 
Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a spanner.ReadRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.streaming_read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def begin_transaction(self, - request: Union[spanner.BeginTransactionRequest, dict] = None, - *, - session: str = None, - options: transaction.TransactionOptions = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> transaction.Transaction: - r"""Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - Args: - request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): - The request object. The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - session (str): - Required. The session in which the - transaction runs. 
- - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - options (google.cloud.spanner_v1.types.TransactionOptions): - Required. Options for the new - transaction. - - This corresponds to the ``options`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.Transaction: - A transaction. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([session, options]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner.BeginTransactionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.BeginTransactionRequest): - request = spanner.BeginTransactionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if options is not None: - request.options = options - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.begin_transaction] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def commit(self, - request: Union[spanner.CommitRequest, dict] = None, - *, - session: str = None, - transaction_id: bytes = None, - mutations: Sequence[mutation.Mutation] = None, - single_use_transaction: transaction.TransactionOptions = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> commit_response.CommitResponse: - r"""Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - re-attempt the transaction from the beginning, re-using the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - Args: - request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): - The request object. The request for - [Commit][google.spanner.v1.Spanner.Commit]. - session (str): - Required. The session in which the - transaction to be committed is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (bytes): - Commit a previously-started - transaction. 
- - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutations (Sequence[google.cloud.spanner_v1.types.Mutation]): - The mutations to be executed when - this transaction commits. All mutations - are applied atomically, in the order - they appear in this list. - - This corresponds to the ``mutations`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): - Execute mutations in a temporary transaction. Note that - unlike commit of a previously-started transaction, - commit with a temporary transaction is non-idempotent. - That is, if the ``CommitRequest`` is sent to Cloud - Spanner more than once (for instance, due to retries in - the application, or in the transport library), it is - possible that the mutations are executed more than once. - If this is undesirable, use - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] - and [Commit][google.spanner.v1.Spanner.Commit] instead. - - This corresponds to the ``single_use_transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.CommitResponse: - The response for - [Commit][google.spanner.v1.Spanner.Commit]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([session, transaction_id, mutations, single_use_transaction]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner.CommitRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.CommitRequest): - request = spanner.CommitRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - if mutations is not None: - request.mutations = mutations - if single_use_transaction is not None: - request.single_use_transaction = single_use_transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.commit] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rollback(self, - request: Union[spanner.RollbackRequest, dict] = None, - *, - session: str = None, - transaction_id: bytes = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Rolls back a transaction, releasing any locks it holds. 
It is a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction is not found. ``Rollback`` never returns - ``ABORTED``. - - Args: - request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): - The request object. The request for - [Rollback][google.spanner.v1.Spanner.Rollback]. - session (str): - Required. The session in which the - transaction to roll back is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (bytes): - Required. The transaction to roll - back. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([session, transaction_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner.RollbackRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, spanner.RollbackRequest): - request = spanner.RollbackRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rollback] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def partition_query(self, - request: Union[spanner.PartitionQueryRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - query operation in parallel. Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - PartitionQueryRequest used to create the partition tokens and - the ExecuteSqlRequests that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it is not possible - to resume the query, and the whole operation must be restarted - from the beginning. - - Args: - request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): - The request object. 
The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a spanner.PartitionQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.PartitionQueryRequest): - request = spanner.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.partition_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def partition_read(self, - request: Union[spanner.PartitionReadRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. 
The same session - and read-only transaction must be used by the - PartitionReadRequest used to create the partition tokens and the - ReadRequests that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual StreamingRead - call issued with a partition_token. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it is not possible - to resume the read, and the whole operation must be restarted - from the beginning. - - Args: - request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): - The request object. The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a spanner.PartitionReadRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner.PartitionReadRequest): - request = spanner.PartitionReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.partition_read] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-spanner", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "SpannerClient", -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py deleted file mode 100644 index 20b368c231..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.spanner_v1.types import spanner - - -class ListSessionsPager: - """A pager for iterating through ``list_sessions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner.ListSessionsResponse], - request: spanner.ListSessionsRequest, - response: spanner.ListSessionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.spanner_v1.types.ListSessionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = spanner.ListSessionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner.Session]: - for page in self.pages: - yield from page.sessions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSessionsAsyncPager: - """A pager for iterating through ``list_sessions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner.ListSessionsResponse]], - request: spanner.ListSessionsRequest, - response: spanner.ListSessionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.spanner_v1.types.ListSessionsResponse): - The initial response object. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = spanner.ListSessionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[spanner.Session]: - async def async_generator(): - async for page in self.pages: - for response in page.sessions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py deleted file mode 100644 index de8947cdb7..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import SpannerTransport -from .grpc import SpannerGrpcTransport -from .grpc_asyncio import SpannerGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] -_transport_registry['grpc'] = SpannerGrpcTransport -_transport_registry['grpc_asyncio'] = SpannerGrpcAsyncIOTransport - -__all__ = ( - 'SpannerTransport', - 'SpannerGrpcTransport', - 'SpannerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py deleted file mode 100644 index 35b39925d1..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py +++ /dev/null @@ -1,425 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-spanner', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -class SpannerTransport(abc.ABC): - """Abstract transport class for Spanner.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_session: gapic_v1.method.wrap_method( - self.create_session, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_create_sessions: gapic_v1.method.wrap_method( - self.batch_create_sessions, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_session: gapic_v1.method.wrap_method( - self.get_session, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_sessions: gapic_v1.method.wrap_method( - self.list_sessions, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_session: gapic_v1.method.wrap_method( - self.delete_session, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_sql: gapic_v1.method.wrap_method( - 
self.execute_sql, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_streaming_sql: gapic_v1.method.wrap_method( - self.execute_streaming_sql, - default_timeout=3600.0, - client_info=client_info, - ), - self.execute_batch_dml: gapic_v1.method.wrap_method( - self.execute_batch_dml, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.read: gapic_v1.method.wrap_method( - self.read, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.streaming_read: gapic_v1.method.wrap_method( - self.streaming_read, - default_timeout=3600.0, - client_info=client_info, - ), - self.begin_transaction: gapic_v1.method.wrap_method( - self.begin_transaction, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.commit: gapic_v1.method.wrap_method( - self.commit, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.rollback: gapic_v1.method.wrap_method( - self.rollback, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - 
default_timeout=30.0, - client_info=client_info, - ), - self.partition_query: gapic_v1.method.wrap_method( - self.partition_query, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_read: gapic_v1.method.wrap_method( - self.partition_read, - default_retry=retries.Retry( -initial=0.25,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - Union[ - spanner.Session, - Awaitable[spanner.Session] - ]]: - raise NotImplementedError() - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - Union[ - spanner.BatchCreateSessionsResponse, - Awaitable[spanner.BatchCreateSessionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - Union[ - spanner.Session, - Awaitable[spanner.Session] - ]]: - raise NotImplementedError() - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - Union[ - spanner.ListSessionsResponse, - Awaitable[spanner.ListSessionsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Union[ - 
result_set.ResultSet, - Awaitable[result_set.ResultSet] - ]]: - raise NotImplementedError() - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Union[ - result_set.PartialResultSet, - Awaitable[result_set.PartialResultSet] - ]]: - raise NotImplementedError() - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - Union[ - spanner.ExecuteBatchDmlResponse, - Awaitable[spanner.ExecuteBatchDmlResponse] - ]]: - raise NotImplementedError() - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - Union[ - result_set.ResultSet, - Awaitable[result_set.ResultSet] - ]]: - raise NotImplementedError() - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - Union[ - result_set.PartialResultSet, - Awaitable[result_set.PartialResultSet] - ]]: - raise NotImplementedError() - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - Union[ - transaction.Transaction, - Awaitable[transaction.Transaction] - ]]: - raise NotImplementedError() - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - Union[ - commit_response.CommitResponse, - Awaitable[commit_response.CommitResponse] - ]]: - raise NotImplementedError() - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - Union[ - spanner.PartitionResponse, - Awaitable[spanner.PartitionResponse] - ]]: - raise NotImplementedError() - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - Union[ - spanner.PartitionResponse, - Awaitable[spanner.PartitionResponse] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'SpannerTransport', -) diff --git 
a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py deleted file mode 100644 index c3a57b8481..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ /dev/null @@ -1,756 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore -from .base import SpannerTransport, DEFAULT_CLIENT_INFO - - -class SpannerGrpcTransport(SpannerTransport): - """gRPC backend transport for Spanner. - - Cloud Spanner API - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. 
- credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - spanner.Session]: - r"""Return a callable for the create session method over gRPC. - - Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. 
Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it is a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner may delete sessions for which no - operations are sent for more than an hour. If a session is - deleted, requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, e.g., ``"SELECT 1"``. - - Returns: - Callable[[~.CreateSessionRequest], - ~.Session]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_session' not in self._stubs: - self._stubs['create_session'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/CreateSession', - request_serializer=spanner.CreateSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['create_session'] - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - spanner.BatchCreateSessionsResponse]: - r"""Return a callable for the batch create sessions method over gRPC. - - Creates multiple new sessions. - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - Returns: - Callable[[~.BatchCreateSessionsRequest], - ~.BatchCreateSessionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'batch_create_sessions' not in self._stubs: - self._stubs['batch_create_sessions'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/BatchCreateSessions', - request_serializer=spanner.BatchCreateSessionsRequest.serialize, - response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, - ) - return self._stubs['batch_create_sessions'] - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - spanner.Session]: - r"""Return a callable for the get session method over gRPC. - - Gets a session. Returns ``NOT_FOUND`` if the session does not - exist. This is mainly useful for determining whether a session - is still alive. - - Returns: - Callable[[~.GetSessionRequest], - ~.Session]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_session' not in self._stubs: - self._stubs['get_session'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/GetSession', - request_serializer=spanner.GetSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['get_session'] - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - spanner.ListSessionsResponse]: - r"""Return a callable for the list sessions method over gRPC. - - Lists all sessions in a given database. - - Returns: - Callable[[~.ListSessionsRequest], - ~.ListSessionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/ListSessions', - request_serializer=spanner.ListSessionsRequest.serialize, - response_deserializer=spanner.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete session method over gRPC. - - Ends a session, releasing server resources associated - with it. This will asynchronously trigger cancellation - of any operations that are running with this session. - - Returns: - Callable[[~.DeleteSessionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_session' not in self._stubs: - self._stubs['delete_session'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/DeleteSession', - request_serializer=spanner.DeleteSessionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_session'] - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.ResultSet]: - r"""Return a callable for the execute sql method over gRPC. - - Executes an SQL statement, returning all results in a single - reply. This method cannot be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. 
- - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - Returns: - Callable[[~.ExecuteSqlRequest], - ~.ResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_sql' not in self._stubs: - self._stubs['execute_sql'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['execute_sql'] - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.PartialResultSet]: - r"""Return a callable for the execute streaming sql method over gRPC. - - Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - Returns: - Callable[[~.ExecuteSqlRequest], - ~.PartialResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'execute_streaming_sql' not in self._stubs: - self._stubs['execute_streaming_sql'] = self.grpc_channel.unary_stream( - '/google.spanner.v1.Spanner/ExecuteStreamingSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['execute_streaming_sql'] - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - spanner.ExecuteBatchDmlResponse]: - r"""Return a callable for the execute batch dml method over gRPC. - - Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - Returns: - Callable[[~.ExecuteBatchDmlRequest], - ~.ExecuteBatchDmlResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'execute_batch_dml' not in self._stubs: - self._stubs['execute_batch_dml'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteBatchDml', - request_serializer=spanner.ExecuteBatchDmlRequest.serialize, - response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, - ) - return self._stubs['execute_batch_dml'] - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - result_set.ResultSet]: - r"""Return a callable for the read method over gRPC. - - Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method - cannot be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - Returns: - Callable[[~.ReadRequest], - ~.ResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'read' not in self._stubs: - self._stubs['read'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/Read', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['read'] - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - result_set.PartialResultSet]: - r"""Return a callable for the streaming read method over gRPC. 
- - Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - Returns: - Callable[[~.ReadRequest], - ~.PartialResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'streaming_read' not in self._stubs: - self._stubs['streaming_read'] = self.grpc_channel.unary_stream( - '/google.spanner.v1.Spanner/StreamingRead', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['streaming_read'] - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - transaction.Transaction]: - r"""Return a callable for the begin transaction method over gRPC. - - Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - Returns: - Callable[[~.BeginTransactionRequest], - ~.Transaction]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/BeginTransaction', - request_serializer=spanner.BeginTransactionRequest.serialize, - response_deserializer=transaction.Transaction.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - commit_response.CommitResponse]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - re-attempt the transaction from the beginning, re-using the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - Returns: - Callable[[~.CommitRequest], - ~.CommitResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'commit' not in self._stubs: - self._stubs['commit'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/Commit', - request_serializer=spanner.CommitRequest.serialize, - response_deserializer=commit_response.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - empty_pb2.Empty]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction, releasing any locks it holds. It is a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction is not found. ``Rollback`` never returns - ``ABORTED``. - - Returns: - Callable[[~.RollbackRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/Rollback', - request_serializer=spanner.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['rollback'] - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - spanner.PartitionResponse]: - r"""Return a callable for the partition query method over gRPC. - - Creates a set of partition tokens that can be used to execute a - query operation in parallel. Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. 
The same - session and read-only transaction must be used by the - PartitionQueryRequest used to create the partition tokens and - the ExecuteSqlRequests that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it is not possible - to resume the query, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionQueryRequest], - ~.PartitionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_query' not in self._stubs: - self._stubs['partition_query'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionQuery', - request_serializer=spanner.PartitionQueryRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_query'] - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - spanner.PartitionResponse]: - r"""Return a callable for the partition read method over gRPC. - - Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - PartitionReadRequest used to create the partition tokens and the - ReadRequests that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual StreamingRead - call issued with a partition_token. 
- - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it is not possible - to resume the read, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionReadRequest], - ~.PartitionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_read' not in self._stubs: - self._stubs['partition_read'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionRead', - request_serializer=spanner.PartitionReadRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_read'] - - def close(self): - self.grpc_channel.close() - -__all__ = ( - 'SpannerGrpcTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py deleted file mode 100644 index 5935e8fe10..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ /dev/null @@ -1,760 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore -from .base import SpannerTransport, DEFAULT_CLIENT_INFO -from .grpc import SpannerGrpcTransport - - -class SpannerGrpcAsyncIOTransport(SpannerTransport): - """gRPC AsyncIO backend transport for Spanner. - - Cloud Spanner API - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - Awaitable[spanner.Session]]: - r"""Return a callable for the create session method over gRPC. - - Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. 
To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it is a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner may delete sessions for which no - operations are sent for more than an hour. If a session is - deleted, requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, e.g., ``"SELECT 1"``. - - Returns: - Callable[[~.CreateSessionRequest], - Awaitable[~.Session]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_session' not in self._stubs: - self._stubs['create_session'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/CreateSession', - request_serializer=spanner.CreateSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['create_session'] - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - Awaitable[spanner.BatchCreateSessionsResponse]]: - r"""Return a callable for the batch create sessions method over gRPC. - - Creates multiple new sessions. - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - Returns: - Callable[[~.BatchCreateSessionsRequest], - Awaitable[~.BatchCreateSessionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_sessions' not in self._stubs: - self._stubs['batch_create_sessions'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/BatchCreateSessions', - request_serializer=spanner.BatchCreateSessionsRequest.serialize, - response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, - ) - return self._stubs['batch_create_sessions'] - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - Awaitable[spanner.Session]]: - r"""Return a callable for the get session method over gRPC. - - Gets a session. Returns ``NOT_FOUND`` if the session does not - exist. This is mainly useful for determining whether a session - is still alive. - - Returns: - Callable[[~.GetSessionRequest], - Awaitable[~.Session]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_session' not in self._stubs: - self._stubs['get_session'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/GetSession', - request_serializer=spanner.GetSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['get_session'] - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - Awaitable[spanner.ListSessionsResponse]]: - r"""Return a callable for the list sessions method over gRPC. - - Lists all sessions in a given database. - - Returns: - Callable[[~.ListSessionsRequest], - Awaitable[~.ListSessionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/ListSessions', - request_serializer=spanner.ListSessionsRequest.serialize, - response_deserializer=spanner.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete session method over gRPC. - - Ends a session, releasing server resources associated - with it. This will asynchronously trigger cancellation - of any operations that are running with this session. - - Returns: - Callable[[~.DeleteSessionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_session' not in self._stubs: - self._stubs['delete_session'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/DeleteSession', - request_serializer=spanner.DeleteSessionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_session'] - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Awaitable[result_set.ResultSet]]: - r"""Return a callable for the execute sql method over gRPC. - - Executes an SQL statement, returning all results in a single - reply. This method cannot be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. 
If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - Returns: - Callable[[~.ExecuteSqlRequest], - Awaitable[~.ResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_sql' not in self._stubs: - self._stubs['execute_sql'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['execute_sql'] - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Awaitable[result_set.PartialResultSet]]: - r"""Return a callable for the execute streaming sql method over gRPC. - - Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - Returns: - Callable[[~.ExecuteSqlRequest], - Awaitable[~.PartialResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'execute_streaming_sql' not in self._stubs: - self._stubs['execute_streaming_sql'] = self.grpc_channel.unary_stream( - '/google.spanner.v1.Spanner/ExecuteStreamingSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['execute_streaming_sql'] - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - Awaitable[spanner.ExecuteBatchDmlResponse]]: - r"""Return a callable for the execute batch dml method over gRPC. - - Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - Returns: - Callable[[~.ExecuteBatchDmlRequest], - Awaitable[~.ExecuteBatchDmlResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'execute_batch_dml' not in self._stubs: - self._stubs['execute_batch_dml'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteBatchDml', - request_serializer=spanner.ExecuteBatchDmlRequest.serialize, - response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, - ) - return self._stubs['execute_batch_dml'] - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - Awaitable[result_set.ResultSet]]: - r"""Return a callable for the read method over gRPC. - - Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method - cannot be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - Returns: - Callable[[~.ReadRequest], - Awaitable[~.ResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'read' not in self._stubs: - self._stubs['read'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/Read', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['read'] - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - Awaitable[result_set.PartialResultSet]]: - r"""Return a callable for the streaming read method over gRPC. 
- - Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - Returns: - Callable[[~.ReadRequest], - Awaitable[~.PartialResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'streaming_read' not in self._stubs: - self._stubs['streaming_read'] = self.grpc_channel.unary_stream( - '/google.spanner.v1.Spanner/StreamingRead', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['streaming_read'] - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - Awaitable[transaction.Transaction]]: - r"""Return a callable for the begin transaction method over gRPC. - - Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - Returns: - Callable[[~.BeginTransactionRequest], - Awaitable[~.Transaction]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/BeginTransaction', - request_serializer=spanner.BeginTransactionRequest.serialize, - response_deserializer=transaction.Transaction.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - Awaitable[commit_response.CommitResponse]]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - re-attempt the transaction from the beginning, re-using the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - Returns: - Callable[[~.CommitRequest], - Awaitable[~.CommitResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'commit' not in self._stubs: - self._stubs['commit'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/Commit', - request_serializer=spanner.CommitRequest.serialize, - response_deserializer=commit_response.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction, releasing any locks it holds. It is a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction is not found. ``Rollback`` never returns - ``ABORTED``. - - Returns: - Callable[[~.RollbackRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/Rollback', - request_serializer=spanner.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['rollback'] - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - Awaitable[spanner.PartitionResponse]]: - r"""Return a callable for the partition query method over gRPC. - - Creates a set of partition tokens that can be used to execute a - query operation in parallel. 
Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - PartitionQueryRequest used to create the partition tokens and - the ExecuteSqlRequests that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it is not possible - to resume the query, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionQueryRequest], - Awaitable[~.PartitionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_query' not in self._stubs: - self._stubs['partition_query'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionQuery', - request_serializer=spanner.PartitionQueryRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_query'] - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - Awaitable[spanner.PartitionResponse]]: - r"""Return a callable for the partition read method over gRPC. - - Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - PartitionReadRequest used to create the partition tokens and the - ReadRequests that use the partition tokens. 
There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual StreamingRead - call issued with a partition_token. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it is not possible - to resume the read, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionReadRequest], - Awaitable[~.PartitionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_read' not in self._stubs: - self._stubs['partition_read'] = self.grpc_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionRead', - request_serializer=spanner.PartitionReadRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_read'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'SpannerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py deleted file mode 100644 index 6a50fe1791..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py +++ /dev/null @@ -1,108 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .commit_response import ( - CommitResponse, -) -from .keys import ( - KeyRange, - KeySet, -) -from .mutation import ( - Mutation, -) -from .query_plan import ( - PlanNode, - QueryPlan, -) -from .result_set import ( - PartialResultSet, - ResultSet, - ResultSetMetadata, - ResultSetStats, -) -from .spanner import ( - BatchCreateSessionsRequest, - BatchCreateSessionsResponse, - BeginTransactionRequest, - CommitRequest, - CreateSessionRequest, - DeleteSessionRequest, - ExecuteBatchDmlRequest, - ExecuteBatchDmlResponse, - ExecuteSqlRequest, - GetSessionRequest, - ListSessionsRequest, - ListSessionsResponse, - Partition, - PartitionOptions, - PartitionQueryRequest, - PartitionReadRequest, - PartitionResponse, - ReadRequest, - RequestOptions, - RollbackRequest, - Session, -) -from .transaction import ( - Transaction, - TransactionOptions, - TransactionSelector, -) -from .type import ( - StructType, - Type, - TypeCode, -) - -__all__ = ( - 'CommitResponse', - 'KeyRange', - 'KeySet', - 'Mutation', - 'PlanNode', - 'QueryPlan', - 'PartialResultSet', - 'ResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 'BeginTransactionRequest', - 'CommitRequest', - 'CreateSessionRequest', - 'DeleteSessionRequest', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'ExecuteSqlRequest', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'Partition', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'PartitionResponse', - 'ReadRequest', - 
'RequestOptions', - 'RollbackRequest', - 'Session', - 'Transaction', - 'TransactionOptions', - 'TransactionSelector', - 'StructType', - 'Type', - 'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py deleted file mode 100644 index d0ee2dd830..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'CommitResponse', - }, -) - - -class CommitResponse(proto.Message): - r"""The response for [Commit][google.spanner.v1.Spanner.Commit]. - - Attributes: - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - The Cloud Spanner timestamp at which the - transaction committed. - commit_stats (google.cloud.spanner_v1.types.CommitResponse.CommitStats): - The statistics about this Commit. Not returned by default. - For more information, see - [CommitRequest.return_commit_stats][google.spanner.v1.CommitRequest.return_commit_stats]. - """ - - class CommitStats(proto.Message): - r"""Additional statistics about a commit. - - Attributes: - mutation_count (int): - The total number of mutations for the transaction. 
Knowing - the ``mutation_count`` value can help you maximize the - number of mutations in a transaction and minimize the number - of API round trips. You can also monitor this value to - prevent transactions from exceeding the system - `limit `__. - If the number of mutations exceeds the limit, the server - returns - `INVALID_ARGUMENT `__. - """ - - mutation_count = proto.Field( - proto.INT64, - number=1, - ) - - commit_timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - commit_stats = proto.Field( - proto.MESSAGE, - number=2, - message=CommitStats, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py deleted file mode 100644 index c7c1681917..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py +++ /dev/null @@ -1,240 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'KeyRange', - 'KeySet', - }, -) - - -class KeyRange(proto.Message): - r"""KeyRange represents a range of rows in a table or index. - - A range has a start key and an end key. These keys can be open or - closed, indicating if the range includes rows with that key. 
- - Keys are represented by lists, where the ith value in the list - corresponds to the ith component of the table or index primary key. - Individual values are encoded as described - [here][google.spanner.v1.TypeCode]. - - For example, consider the following table definition: - - :: - - CREATE TABLE UserEvents ( - UserName STRING(MAX), - EventDate STRING(10) - ) PRIMARY KEY(UserName, EventDate); - - The following keys name rows in this table: - - :: - - ["Bob", "2014-09-23"] - ["Alfred", "2015-06-12"] - - Since the ``UserEvents`` table's ``PRIMARY KEY`` clause names two - columns, each ``UserEvents`` key has two elements; the first is the - ``UserName``, and the second is the ``EventDate``. - - Key ranges with multiple components are interpreted - lexicographically by component using the table or index key's - declared sort order. For example, the following range returns all - events for user ``"Bob"`` that occurred in the year 2015: - - :: - - "start_closed": ["Bob", "2015-01-01"] - "end_closed": ["Bob", "2015-12-31"] - - Start and end keys can omit trailing key components. This affects - the inclusion and exclusion of rows that exactly match the provided - key components: if the key is closed, then rows that exactly match - the provided components are included; if the key is open, then rows - that exactly match are not included. 
- - For example, the following range includes all events for ``"Bob"`` - that occurred during and after the year 2000: - - :: - - "start_closed": ["Bob", "2000-01-01"] - "end_closed": ["Bob"] - - The next example retrieves all events for ``"Bob"``: - - :: - - "start_closed": ["Bob"] - "end_closed": ["Bob"] - - To retrieve events before the year 2000: - - :: - - "start_closed": ["Bob"] - "end_open": ["Bob", "2000-01-01"] - - The following range includes all rows in the table: - - :: - - "start_closed": [] - "end_closed": [] - - This range returns all users whose ``UserName`` begins with any - character from A to C: - - :: - - "start_closed": ["A"] - "end_open": ["D"] - - This range returns all users whose ``UserName`` begins with B: - - :: - - "start_closed": ["B"] - "end_open": ["C"] - - Key ranges honor column sort order. For example, suppose a table is - defined as follows: - - :: - - CREATE TABLE DescendingSortedTable { - Key INT64, - ... - ) PRIMARY KEY(Key DESC); - - The following range retrieves all rows with key values between 1 and - 100 inclusive: - - :: - - "start_closed": ["100"] - "end_closed": ["1"] - - Note that 100 is passed as the start, and 1 is passed as the end, - because ``Key`` is a descending column in the schema. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - start_closed (google.protobuf.struct_pb2.ListValue): - If the start is closed, then the range includes all rows - whose first ``len(start_closed)`` key columns exactly match - ``start_closed``. - This field is a member of `oneof`_ ``start_key_type``. 
- start_open (google.protobuf.struct_pb2.ListValue): - If the start is open, then the range excludes rows whose - first ``len(start_open)`` key columns exactly match - ``start_open``. - This field is a member of `oneof`_ ``start_key_type``. - end_closed (google.protobuf.struct_pb2.ListValue): - If the end is closed, then the range includes all rows whose - first ``len(end_closed)`` key columns exactly match - ``end_closed``. - This field is a member of `oneof`_ ``end_key_type``. - end_open (google.protobuf.struct_pb2.ListValue): - If the end is open, then the range excludes rows whose first - ``len(end_open)`` key columns exactly match ``end_open``. - This field is a member of `oneof`_ ``end_key_type``. - """ - - start_closed = proto.Field( - proto.MESSAGE, - number=1, - oneof='start_key_type', - message=struct_pb2.ListValue, - ) - start_open = proto.Field( - proto.MESSAGE, - number=2, - oneof='start_key_type', - message=struct_pb2.ListValue, - ) - end_closed = proto.Field( - proto.MESSAGE, - number=3, - oneof='end_key_type', - message=struct_pb2.ListValue, - ) - end_open = proto.Field( - proto.MESSAGE, - number=4, - oneof='end_key_type', - message=struct_pb2.ListValue, - ) - - -class KeySet(proto.Message): - r"""``KeySet`` defines a collection of Cloud Spanner keys and/or key - ranges. All the keys are expected to be in the same table or index. - The keys need not be sorted in any particular way. - - If the same key is specified multiple times in the set (for example - if two ranges, two keys, or a key and a range overlap), Cloud - Spanner behaves as if the key were only specified once. - - Attributes: - keys (Sequence[google.protobuf.struct_pb2.ListValue]): - A list of specific keys. Entries in ``keys`` should have - exactly as many elements as there are columns in the primary - or index key with which this ``KeySet`` is used. Individual - key values are encoded as described - [here][google.spanner.v1.TypeCode]. 
- ranges (Sequence[google.cloud.spanner_v1.types.KeyRange]): - A list of key ranges. See - [KeyRange][google.spanner.v1.KeyRange] for more information - about key range specifications. - all_ (bool): - For convenience ``all`` can be set to ``true`` to indicate - that this ``KeySet`` matches all keys in the table or index. - Note that any keys specified in ``keys`` or ``ranges`` are - only yielded once. - """ - - keys = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct_pb2.ListValue, - ) - ranges = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='KeyRange', - ) - all_ = proto.Field( - proto.BOOL, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py deleted file mode 100644 index a58f1daa3c..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.spanner_v1.types import keys -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'Mutation', - }, -) - - -class Mutation(proto.Message): - r"""A modification to one or more Cloud Spanner rows. 
Mutations can be - applied to a Cloud Spanner database by sending them in a - [Commit][google.spanner.v1.Spanner.Commit] call. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - insert (google.cloud.spanner_v1.types.Mutation.Write): - Insert new rows in a table. If any of the rows already - exist, the write or transaction fails with error - ``ALREADY_EXISTS``. - This field is a member of `oneof`_ ``operation``. - update (google.cloud.spanner_v1.types.Mutation.Write): - Update existing rows in a table. If any of the rows does not - already exist, the transaction fails with error - ``NOT_FOUND``. - This field is a member of `oneof`_ ``operation``. - insert_or_update (google.cloud.spanner_v1.types.Mutation.Write): - Like [insert][google.spanner.v1.Mutation.insert], except - that if the row already exists, then its column values are - overwritten with the ones provided. Any column values not - explicitly written are preserved. - - When using - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], - just as when using - [insert][google.spanner.v1.Mutation.insert], all - ``NOT NULL`` columns in the table must be given a value. - This holds true even when the row already exists and will - therefore actually be updated. - This field is a member of `oneof`_ ``operation``. - replace (google.cloud.spanner_v1.types.Mutation.Write): - Like [insert][google.spanner.v1.Mutation.insert], except - that if the row already exists, it is deleted, and the - column values provided are inserted instead. Unlike - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], - this means any values not explicitly written become - ``NULL``. 
- - In an interleaved table, if you create the child table with - the ``ON DELETE CASCADE`` annotation, then replacing a - parent row also deletes the child rows. Otherwise, you must - delete the child rows before you replace the parent row. - This field is a member of `oneof`_ ``operation``. - delete (google.cloud.spanner_v1.types.Mutation.Delete): - Delete rows from a table. Succeeds whether or - not the named rows were present. - This field is a member of `oneof`_ ``operation``. - """ - - class Write(proto.Message): - r"""Arguments to [insert][google.spanner.v1.Mutation.insert], - [update][google.spanner.v1.Mutation.update], - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], and - [replace][google.spanner.v1.Mutation.replace] operations. - - Attributes: - table (str): - Required. The table whose rows will be - written. - columns (Sequence[str]): - The names of the columns in - [table][google.spanner.v1.Mutation.Write.table] to be - written. - - The list of columns must contain enough columns to allow - Cloud Spanner to derive values for all primary key columns - in the row(s) to be modified. - values (Sequence[google.protobuf.struct_pb2.ListValue]): - The values to be written. ``values`` can contain more than - one list of values. If it does, then multiple rows are - written, one for each entry in ``values``. Each list in - ``values`` must have exactly as many entries as there are - entries in - [columns][google.spanner.v1.Mutation.Write.columns] above. - Sending multiple lists is equivalent to sending multiple - ``Mutation``\ s, each containing one ``values`` entry and - repeating [table][google.spanner.v1.Mutation.Write.table] - and [columns][google.spanner.v1.Mutation.Write.columns]. - Individual values in each list are encoded as described - [here][google.spanner.v1.TypeCode]. 
- """ - - table = proto.Field( - proto.STRING, - number=1, - ) - columns = proto.RepeatedField( - proto.STRING, - number=2, - ) - values = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=struct_pb2.ListValue, - ) - - class Delete(proto.Message): - r"""Arguments to [delete][google.spanner.v1.Mutation.delete] operations. - - Attributes: - table (str): - Required. The table whose rows will be - deleted. - key_set (google.cloud.spanner_v1.types.KeySet): - Required. The primary keys of the rows within - [table][google.spanner.v1.Mutation.Delete.table] to delete. - The primary keys must be specified in the order in which - they appear in the ``PRIMARY KEY()`` clause of the table's - equivalent DDL statement (the DDL statement used to create - the table). Delete is idempotent. The transaction will - succeed even if some or all rows do not exist. - """ - - table = proto.Field( - proto.STRING, - number=1, - ) - key_set = proto.Field( - proto.MESSAGE, - number=2, - message=keys.KeySet, - ) - - insert = proto.Field( - proto.MESSAGE, - number=1, - oneof='operation', - message=Write, - ) - update = proto.Field( - proto.MESSAGE, - number=2, - oneof='operation', - message=Write, - ) - insert_or_update = proto.Field( - proto.MESSAGE, - number=3, - oneof='operation', - message=Write, - ) - replace = proto.Field( - proto.MESSAGE, - number=4, - oneof='operation', - message=Write, - ) - delete = proto.Field( - proto.MESSAGE, - number=5, - oneof='operation', - message=Delete, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py deleted file mode 100644 index 9df8081f1d..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py +++ /dev/null @@ -1,200 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you 
may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'PlanNode', - 'QueryPlan', - }, -) - - -class PlanNode(proto.Message): - r"""Node information for nodes appearing in a - [QueryPlan.plan_nodes][google.spanner.v1.QueryPlan.plan_nodes]. - - Attributes: - index (int): - The ``PlanNode``'s index in [node - list][google.spanner.v1.QueryPlan.plan_nodes]. - kind (google.cloud.spanner_v1.types.PlanNode.Kind): - Used to determine the type of node. May be needed for - visualizing different kinds of nodes differently. For - example, If the node is a - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] node, it - will have a condensed representation which can be used to - directly embed a description of the node in its parent. - display_name (str): - The display name for the node. - child_links (Sequence[google.cloud.spanner_v1.types.PlanNode.ChildLink]): - List of child node ``index``\ es and their relationship to - this parent. - short_representation (google.cloud.spanner_v1.types.PlanNode.ShortRepresentation): - Condensed representation for - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] nodes. - metadata (google.protobuf.struct_pb2.Struct): - Attributes relevant to the node contained in a group of - key-value pairs. 
For example, a Parameter Reference node - could have the following information in its metadata: - - :: - - { - "parameter_reference": "param1", - "parameter_type": "array" - } - execution_stats (google.protobuf.struct_pb2.Struct): - The execution statistics associated with the - node, contained in a group of key-value pairs. - Only present if the plan was returned as a - result of a profile query. For example, number - of executions, number of rows/time per execution - etc. - """ - class Kind(proto.Enum): - r"""The kind of [PlanNode][google.spanner.v1.PlanNode]. Distinguishes - between the two different kinds of nodes that can appear in a query - plan. - """ - KIND_UNSPECIFIED = 0 - RELATIONAL = 1 - SCALAR = 2 - - class ChildLink(proto.Message): - r"""Metadata associated with a parent-child relationship appearing in a - [PlanNode][google.spanner.v1.PlanNode]. - - Attributes: - child_index (int): - The node to which the link points. - type_ (str): - The type of the link. For example, in Hash - Joins this could be used to distinguish between - the build child and the probe child, or in the - case of the child being an output variable, to - represent the tag associated with the output - variable. - variable (str): - Only present if the child node is - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] and - corresponds to an output variable of the parent node. The - field carries the name of the output variable. For example, - a ``TableScan`` operator that reads rows from a table will - have child links to the ``SCALAR`` nodes representing the - output variables created for each column that is read by the - operator. The corresponding ``variable`` fields will be set - to the variable names assigned to the columns. 
- """ - - child_index = proto.Field( - proto.INT32, - number=1, - ) - type_ = proto.Field( - proto.STRING, - number=2, - ) - variable = proto.Field( - proto.STRING, - number=3, - ) - - class ShortRepresentation(proto.Message): - r"""Condensed representation of a node and its subtree. Only present for - ``SCALAR`` [PlanNode(s)][google.spanner.v1.PlanNode]. - - Attributes: - description (str): - A string representation of the expression - subtree rooted at this node. - subqueries (Sequence[google.cloud.spanner_v1.types.PlanNode.ShortRepresentation.SubqueriesEntry]): - A mapping of (subquery variable name) -> (subquery node id) - for cases where the ``description`` string of this node - references a ``SCALAR`` subquery contained in the expression - subtree rooted at this node. The referenced ``SCALAR`` - subquery may not necessarily be a direct child of this node. - """ - - description = proto.Field( - proto.STRING, - number=1, - ) - subqueries = proto.MapField( - proto.STRING, - proto.INT32, - number=2, - ) - - index = proto.Field( - proto.INT32, - number=1, - ) - kind = proto.Field( - proto.ENUM, - number=2, - enum=Kind, - ) - display_name = proto.Field( - proto.STRING, - number=3, - ) - child_links = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=ChildLink, - ) - short_representation = proto.Field( - proto.MESSAGE, - number=5, - message=ShortRepresentation, - ) - metadata = proto.Field( - proto.MESSAGE, - number=6, - message=struct_pb2.Struct, - ) - execution_stats = proto.Field( - proto.MESSAGE, - number=7, - message=struct_pb2.Struct, - ) - - -class QueryPlan(proto.Message): - r"""Contains an ordered list of nodes appearing in the query - plan. - - Attributes: - plan_nodes (Sequence[google.cloud.spanner_v1.types.PlanNode]): - The nodes in the query plan. Plan nodes are returned in - pre-order starting with the plan root. Each - [PlanNode][google.spanner.v1.PlanNode]'s ``id`` corresponds - to its index in ``plan_nodes``. 
- """ - - plan_nodes = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PlanNode', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py deleted file mode 100644 index 27371dc89a..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py +++ /dev/null @@ -1,316 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.spanner_v1.types import query_plan as gs_query_plan -from google.cloud.spanner_v1.types import transaction as gs_transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'ResultSet', - 'PartialResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - }, -) - - -class ResultSet(proto.Message): - r"""Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Attributes: - metadata (google.cloud.spanner_v1.types.ResultSetMetadata): - Metadata about the result set, such as row - type information. 
- rows (Sequence[google.protobuf.struct_pb2.ListValue]): - Each element in ``rows`` is a row whose format is defined by - [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. - The ith element in each row matches the ith field in - [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. - Elements are encoded based on type as described - [here][google.spanner.v1.TypeCode]. - stats (google.cloud.spanner_v1.types.ResultSetStats): - Query plan and execution statistics for the SQL statement - that produced this result set. These can be requested by - setting - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - DML statements always produce stats containing the number of - rows modified, unless executed using the - [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN] - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - Other fields may or may not be populated, based on the - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - """ - - metadata = proto.Field( - proto.MESSAGE, - number=1, - message='ResultSetMetadata', - ) - rows = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct_pb2.ListValue, - ) - stats = proto.Field( - proto.MESSAGE, - number=3, - message='ResultSetStats', - ) - - -class PartialResultSet(proto.Message): - r"""Partial results from a streaming read or SQL query. Streaming - reads and SQL queries better tolerate large result sets, large - rows, and large values, but are a little trickier to consume. - - Attributes: - metadata (google.cloud.spanner_v1.types.ResultSetMetadata): - Metadata about the result set, such as row - type information. Only present in the first - response. 
- values (Sequence[google.protobuf.struct_pb2.Value]): - A streamed result set consists of a stream of values, which - might be split into many ``PartialResultSet`` messages to - accommodate large rows and/or large values. Every N complete - values defines a row, where N is equal to the number of - entries in - [metadata.row_type.fields][google.spanner.v1.StructType.fields]. - - Most values are encoded based on type as described - [here][google.spanner.v1.TypeCode]. - - It is possible that the last value in values is "chunked", - meaning that the rest of the value is sent in subsequent - ``PartialResultSet``\ (s). This is denoted by the - [chunked_value][google.spanner.v1.PartialResultSet.chunked_value] - field. Two or more chunked values can be merged to form a - complete value as follows: - - - ``bool/number/null``: cannot be chunked - - ``string``: concatenate the strings - - ``list``: concatenate the lists. If the last element in a - list is a ``string``, ``list``, or ``object``, merge it - with the first element in the next list by applying these - rules recursively. - - ``object``: concatenate the (field name, field value) - pairs. If a field name is duplicated, then apply these - rules recursively to merge the field values. - - Some examples of merging: - - :: - - # Strings are concatenated. - "foo", "bar" => "foobar" - - # Lists of non-strings are concatenated. - [2, 3], [4] => [2, 3, 4] - - # Lists are concatenated, but the last and first elements are merged - # because they are strings. - ["a", "b"], ["c", "d"] => ["a", "bc", "d"] - - # Lists are concatenated, but the last and first elements are merged - # because they are lists. Recursively, the last and first elements - # of the inner lists are merged because they are strings. - ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"] - - # Non-overlapping object fields are combined. - {"a": "1"}, {"b": "2"} => {"a": "1", "b": 2"} - - # Overlapping object fields are merged. 
- {"a": "1"}, {"a": "2"} => {"a": "12"} - - # Examples of merging objects containing lists of strings. - {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]} - - For a more complete example, suppose a streaming SQL query - is yielding a result set whose rows contain a single string - field. The following ``PartialResultSet``\ s might be - yielded: - - :: - - { - "metadata": { ... } - "values": ["Hello", "W"] - "chunked_value": true - "resume_token": "Af65..." - } - { - "values": ["orl"] - "chunked_value": true - "resume_token": "Bqp2..." - } - { - "values": ["d"] - "resume_token": "Zx1B..." - } - - This sequence of ``PartialResultSet``\ s encodes two rows, - one containing the field value ``"Hello"``, and a second - containing the field value ``"World" = "W" + "orl" + "d"``. - chunked_value (bool): - If true, then the final value in - [values][google.spanner.v1.PartialResultSet.values] is - chunked, and must be combined with more values from - subsequent ``PartialResultSet``\ s to obtain a complete - field value. - resume_token (bytes): - Streaming calls might be interrupted for a variety of - reasons, such as TCP connection loss. If this occurs, the - stream of results can be resumed by re-sending the original - request and including ``resume_token``. Note that executing - any other transaction in the same session invalidates the - token. - stats (google.cloud.spanner_v1.types.ResultSetStats): - Query plan and execution statistics for the statement that - produced this streaming result set. These can be requested - by setting - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] - and are sent only once with the last response in the stream. - This field will also be present in the last response for DML - statements. 
- """ - - metadata = proto.Field( - proto.MESSAGE, - number=1, - message='ResultSetMetadata', - ) - values = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct_pb2.Value, - ) - chunked_value = proto.Field( - proto.BOOL, - number=3, - ) - resume_token = proto.Field( - proto.BYTES, - number=4, - ) - stats = proto.Field( - proto.MESSAGE, - number=5, - message='ResultSetStats', - ) - - -class ResultSetMetadata(proto.Message): - r"""Metadata about a [ResultSet][google.spanner.v1.ResultSet] or - [PartialResultSet][google.spanner.v1.PartialResultSet]. - - Attributes: - row_type (google.cloud.spanner_v1.types.StructType): - Indicates the field names and types for the rows in the - result set. For example, a SQL query like - ``"SELECT UserId, UserName FROM Users"`` could return a - ``row_type`` value like: - - :: - - "fields": [ - { "name": "UserId", "type": { "code": "INT64" } }, - { "name": "UserName", "type": { "code": "STRING" } }, - ] - transaction (google.cloud.spanner_v1.types.Transaction): - If the read or SQL query began a transaction - as a side-effect, the information about the new - transaction is yielded here. - """ - - row_type = proto.Field( - proto.MESSAGE, - number=1, - message=gs_type.StructType, - ) - transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.Transaction, - ) - - -class ResultSetStats(proto.Message): - r"""Additional statistics about a - [ResultSet][google.spanner.v1.ResultSet] or - [PartialResultSet][google.spanner.v1.PartialResultSet]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query_plan (google.cloud.spanner_v1.types.QueryPlan): - [QueryPlan][google.spanner.v1.QueryPlan] for the query - associated with this result. - query_stats (google.protobuf.struct_pb2.Struct): - Aggregated statistics from the execution of the query. Only - present when the query is profiled. For example, a query - could return the statistics as follows: - - :: - - { - "rows_returned": "3", - "elapsed_time": "1.22 secs", - "cpu_time": "1.19 secs" - } - row_count_exact (int): - Standard DML returns an exact count of rows - that were modified. - This field is a member of `oneof`_ ``row_count``. - row_count_lower_bound (int): - Partitioned DML does not offer exactly-once - semantics, so it returns a lower bound of the - rows modified. - This field is a member of `oneof`_ ``row_count``. - """ - - query_plan = proto.Field( - proto.MESSAGE, - number=1, - message=gs_query_plan.QueryPlan, - ) - query_stats = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - row_count_exact = proto.Field( - proto.INT64, - number=3, - oneof='row_count', - ) - row_count_lower_bound = proto.Field( - proto.INT64, - number=4, - oneof='row_count', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py deleted file mode 100644 index 187aa7b83b..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py +++ /dev/null @@ -1,1263 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.spanner_v1.types import keys -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import transaction as gs_transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'CreateSessionRequest', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 'Session', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'DeleteSessionRequest', - 'RequestOptions', - 'ExecuteSqlRequest', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'Partition', - 'PartitionResponse', - 'ReadRequest', - 'BeginTransactionRequest', - 'CommitRequest', - 'RollbackRequest', - }, -) - - -class CreateSessionRequest(proto.Message): - r"""The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - - Attributes: - database (str): - Required. The database in which the new - session is created. - session (google.cloud.spanner_v1.types.Session): - The session to create. 
- """ - - database = proto.Field( - proto.STRING, - number=1, - ) - session = proto.Field( - proto.MESSAGE, - number=2, - message='Session', - ) - - -class BatchCreateSessionsRequest(proto.Message): - r"""The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - Attributes: - database (str): - Required. The database in which the new - sessions are created. - session_template (google.cloud.spanner_v1.types.Session): - Parameters to be applied to each created - session. - session_count (int): - Required. The number of sessions to be created in this batch - call. The API may return fewer than the requested number of - sessions. If a specific number of sessions are desired, the - client can make additional calls to BatchCreateSessions - (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - """ - - database = proto.Field( - proto.STRING, - number=1, - ) - session_template = proto.Field( - proto.MESSAGE, - number=2, - message='Session', - ) - session_count = proto.Field( - proto.INT32, - number=3, - ) - - -class BatchCreateSessionsResponse(proto.Message): - r"""The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - Attributes: - session (Sequence[google.cloud.spanner_v1.types.Session]): - The freshly created sessions. - """ - - session = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Session', - ) - - -class Session(proto.Message): - r"""A session in the Cloud Spanner API. - - Attributes: - name (str): - Output only. The name of the session. This is - always system-assigned. - labels (Sequence[google.cloud.spanner_v1.types.Session.LabelsEntry]): - The labels for the session. - - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. 
- - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 64 labels can be associated with a given - session. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp when the session - is created. - approximate_last_use_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The approximate timestamp when - the session is last used. It is typically - earlier than the actual last use time. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - create_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - approximate_last_use_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class GetSessionRequest(proto.Message): - r"""The request for [GetSession][google.spanner.v1.Spanner.GetSession]. - - Attributes: - name (str): - Required. The name of the session to - retrieve. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSessionsRequest(proto.Message): - r"""The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Attributes: - database (str): - Required. The database in which to list - sessions. - page_size (int): - Number of sessions to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.v1.ListSessionsResponse.next_page_token] - from a previous - [ListSessionsResponse][google.spanner.v1.ListSessionsResponse]. - filter (str): - An expression for filtering the results of the request. - Filter rules are case insensitive. 
The fields eligible for - filtering are: - - - ``labels.key`` where key is the name of a label - - Some examples of using filters are: - - - ``labels.env:*`` --> The session has the label "env". - - ``labels.env:dev`` --> The session has the label "env" - and the value of the label contains the string "dev". - """ - - database = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - filter = proto.Field( - proto.STRING, - number=4, - ) - - -class ListSessionsResponse(proto.Message): - r"""The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Attributes: - sessions (Sequence[google.cloud.spanner_v1.types.Session]): - The list of requested sessions. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListSessions][google.spanner.v1.Spanner.ListSessions] call - to fetch more of the matching sessions. - """ - - @property - def raw_page(self): - return self - - sessions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Session', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteSessionRequest(proto.Message): - r"""The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - - Attributes: - name (str): - Required. The name of the session to delete. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class RequestOptions(proto.Message): - r"""Common request options for various APIs. - - Attributes: - priority (google.cloud.spanner_v1.types.RequestOptions.Priority): - Priority for the request. - request_tag (str): - A per-request tag which can be applied to queries or reads, - used for statistics collection. Both request_tag and - transaction_tag can be specified for a read or query that - belongs to a transaction. This field is ignored for requests - where it's not applicable (e.g. CommitRequest). 
Legal - characters for ``request_tag`` values are all printable - characters (ASCII 32 - 126) and the length of a request_tag - is limited to 50 characters. Values that exceed this limit - are truncated. - transaction_tag (str): - A tag used for statistics collection about this transaction. - Both request_tag and transaction_tag can be specified for a - read or query that belongs to a transaction. The value of - transaction_tag should be the same for all requests - belonging to the same transaction. If this request doesn’t - belong to any transaction, transaction_tag will be ignored. - Legal characters for ``transaction_tag`` values are all - printable characters (ASCII 32 - 126) and the length of a - transaction_tag is limited to 50 characters. Values that - exceed this limit are truncated. - """ - class Priority(proto.Enum): - r"""The relative priority for requests. Note that priority is not - applicable for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - - The priority acts as a hint to the Cloud Spanner scheduler and does - not guarantee priority or order of execution. For example: - - - Some parts of a write operation always execute at - ``PRIORITY_HIGH``, regardless of the specified priority. This may - cause you to see an increase in high priority workload even when - executing a low priority request. This can also potentially cause - a priority inversion where a lower priority request will be - fulfilled ahead of a higher priority request. - - If a transaction contains multiple operations with different - priorities, Cloud Spanner does not guarantee to process the - higher priority operations first. There may be other constraints - to satisfy, such as order of operations. 
- """ - PRIORITY_UNSPECIFIED = 0 - PRIORITY_LOW = 1 - PRIORITY_MEDIUM = 2 - PRIORITY_HIGH = 3 - - priority = proto.Field( - proto.ENUM, - number=1, - enum=Priority, - ) - request_tag = proto.Field( - proto.STRING, - number=2, - ) - transaction_tag = proto.Field( - proto.STRING, - number=3, - ) - - -class ExecuteSqlRequest(proto.Message): - r"""The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] - and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - - Attributes: - session (str): - Required. The session in which the SQL query - should be performed. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - The transaction to use. - For queries, if none is provided, the default is - a temporary read-only transaction with strong - concurrency. - - Standard DML statements require a read-write - transaction. To protect against replays, single- - use transactions are not supported. The caller - must either supply an existing transaction ID or - begin a new transaction. - Partitioned DML requires an existing Partitioned - DML transaction ID. - sql (str): - Required. The SQL string. - params (google.protobuf.struct_pb2.Struct): - Parameter names and values that bind to placeholders in the - SQL string. - - A parameter placeholder consists of the ``@`` character - followed by the parameter name (for example, - ``@firstName``). Parameter names must conform to the naming - requirements of identifiers as specified at - https://cloud.google.com/spanner/docs/lexical#identifiers. - - Parameters can appear anywhere that a literal value is - expected. The same parameter name can be used more than - once, for example: - - ``"WHERE id > @msg_id AND id < @msg_id + 100"`` - - It is an error to execute a SQL statement with unbound - parameters. - param_types (Sequence[google.cloud.spanner_v1.types.ExecuteSqlRequest.ParamTypesEntry]): - It is not always possible for Cloud Spanner to infer the - right SQL type from a JSON value. 
For example, values of - type ``BYTES`` and values of type ``STRING`` both appear in - [params][google.spanner.v1.ExecuteSqlRequest.params] as JSON - strings. - - In these cases, ``param_types`` can be used to specify the - exact SQL type for some or all of the SQL statement - parameters. See the definition of - [Type][google.spanner.v1.Type] for more information about - SQL types. - resume_token (bytes): - If this request is resuming a previously interrupted SQL - statement execution, ``resume_token`` should be copied from - the last - [PartialResultSet][google.spanner.v1.PartialResultSet] - yielded before the interruption. Doing this enables the new - SQL statement execution to resume where the last one left - off. The rest of the request parameters must exactly match - the request that yielded this token. - query_mode (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode): - Used to control the amount of debugging information returned - in [ResultSetStats][google.spanner.v1.ResultSetStats]. If - [partition_token][google.spanner.v1.ExecuteSqlRequest.partition_token] - is set, - [query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] - can only be set to - [QueryMode.NORMAL][google.spanner.v1.ExecuteSqlRequest.QueryMode.NORMAL]. - partition_token (bytes): - If present, results will be restricted to the specified - partition previously created using PartitionQuery(). There - must be an exact match for the values of fields common to - this message and the PartitionQueryRequest message used to - create this partition_token. - seqno (int): - A per-transaction sequence number used to - identify this request. This field makes each - request idempotent such that if the request is - received multiple times, at most one will - succeed. - - The sequence number must be monotonically - increasing within the transaction. If a request - arrives for the first time with an out-of-order - sequence number, the transaction may be aborted. 
- Replays of previously handled requests will - yield the same response as the first execution. - Required for DML statements. Ignored for - queries. - query_options (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions): - Query optimizer configuration to use for the - given query. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - """ - class QueryMode(proto.Enum): - r"""Mode in which the statement must be processed.""" - NORMAL = 0 - PLAN = 1 - PROFILE = 2 - - class QueryOptions(proto.Message): - r"""Query optimizer configuration. - - Attributes: - optimizer_version (str): - An option to control the selection of optimizer version. - - This parameter allows individual queries to pick different - query optimizer versions. - - Specifying ``latest`` as a value instructs Cloud Spanner to - use the latest supported query optimizer version. If not - specified, Cloud Spanner uses the optimizer version set at - the database level options. Any other positive integer (from - the list of supported optimizer versions) overrides the - default optimizer version for query execution. - - The list of supported optimizer versions can be queried from - SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS. - - Executing a SQL statement with an invalid optimizer version - fails with an ``INVALID_ARGUMENT`` error. - - See - https://cloud.google.com/spanner/docs/query-optimizer/manage-query-optimizer - for more information on managing the query optimizer. - - The ``optimizer_version`` statement hint has precedence over - this setting. - optimizer_statistics_package (str): - An option to control the selection of optimizer statistics - package. - - This parameter allows individual queries to use a different - query optimizer statistics package. - - Specifying ``latest`` as a value instructs Cloud Spanner to - use the latest generated statistics package. 
If not - specified, Cloud Spanner uses the statistics package set at - the database level options, or the latest package if the - database option is not set. - - The statistics package requested by the query has to be - exempt from garbage collection. This can be achieved with - the following DDL statement: - - :: - - ALTER STATISTICS SET OPTIONS (allow_gc=false) - - The list of available statistics packages can be queried - from ``INFORMATION_SCHEMA.SPANNER_STATISTICS``. - - Executing a SQL statement with an invalid optimizer - statistics package or with a statistics package that allows - garbage collection fails with an ``INVALID_ARGUMENT`` error. - """ - - optimizer_version = proto.Field( - proto.STRING, - number=1, - ) - optimizer_statistics_package = proto.Field( - proto.STRING, - number=2, - ) - - session = proto.Field( - proto.STRING, - number=1, - ) - transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - sql = proto.Field( - proto.STRING, - number=3, - ) - params = proto.Field( - proto.MESSAGE, - number=4, - message=struct_pb2.Struct, - ) - param_types = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=5, - message=gs_type.Type, - ) - resume_token = proto.Field( - proto.BYTES, - number=6, - ) - query_mode = proto.Field( - proto.ENUM, - number=7, - enum=QueryMode, - ) - partition_token = proto.Field( - proto.BYTES, - number=8, - ) - seqno = proto.Field( - proto.INT64, - number=9, - ) - query_options = proto.Field( - proto.MESSAGE, - number=10, - message=QueryOptions, - ) - request_options = proto.Field( - proto.MESSAGE, - number=11, - message='RequestOptions', - ) - - -class ExecuteBatchDmlRequest(proto.Message): - r"""The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - - Attributes: - session (str): - Required. The session in which the DML - statements should be performed. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - Required. 
The transaction to use. Must be a - read-write transaction. - To protect against replays, single-use - transactions are not supported. The caller must - either supply an existing transaction ID or - begin a new transaction. - statements (Sequence[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest.Statement]): - Required. The list of statements to execute in this batch. - Statements are executed serially, such that the effects of - statement ``i`` are visible to statement ``i+1``. Each - statement must be a DML statement. Execution stops at the - first failed statement; the remaining statements are not - executed. - - Callers must provide at least one statement. - seqno (int): - Required. A per-transaction sequence number - used to identify this request. This field makes - each request idempotent such that if the request - is received multiple times, at most one will - succeed. - - The sequence number must be monotonically - increasing within the transaction. If a request - arrives for the first time with an out-of-order - sequence number, the transaction may be aborted. - Replays of previously handled requests will - yield the same response as the first execution. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - """ - - class Statement(proto.Message): - r"""A single DML statement. - - Attributes: - sql (str): - Required. The DML string. - params (google.protobuf.struct_pb2.Struct): - Parameter names and values that bind to placeholders in the - DML string. - - A parameter placeholder consists of the ``@`` character - followed by the parameter name (for example, - ``@firstName``). Parameter names can contain letters, - numbers, and underscores. - - Parameters can appear anywhere that a literal value is - expected. The same parameter name can be used more than - once, for example: - - ``"WHERE id > @msg_id AND id < @msg_id + 100"`` - - It is an error to execute a SQL statement with unbound - parameters. 
- param_types (Sequence[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest.Statement.ParamTypesEntry]): - It is not always possible for Cloud Spanner to infer the - right SQL type from a JSON value. For example, values of - type ``BYTES`` and values of type ``STRING`` both appear in - [params][google.spanner.v1.ExecuteBatchDmlRequest.Statement.params] - as JSON strings. - - In these cases, ``param_types`` can be used to specify the - exact SQL type for some or all of the SQL statement - parameters. See the definition of - [Type][google.spanner.v1.Type] for more information about - SQL types. - """ - - sql = proto.Field( - proto.STRING, - number=1, - ) - params = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - param_types = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message=gs_type.Type, - ) - - session = proto.Field( - proto.STRING, - number=1, - ) - transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - statements = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Statement, - ) - seqno = proto.Field( - proto.INT64, - number=4, - ) - request_options = proto.Field( - proto.MESSAGE, - number=5, - message='RequestOptions', - ) - - -class ExecuteBatchDmlResponse(proto.Message): - r"""The response for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - Contains a list of [ResultSet][google.spanner.v1.ResultSet] - messages, one for each DML statement that has successfully executed, - in the same order as the statements in the request. If a statement - fails, the status in the response body identifies the cause of the - failure. - - To check for DML statements that failed, use the following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value ``OK`` indicates - that all statements were executed successfully. - 2. 
If the status was not ``OK``, check the number of result sets in - the response. If the response contains ``N`` - [ResultSet][google.spanner.v1.ResultSet] messages, then statement - ``N+1`` in the request failed. - - Example 1: - - - Request: 5 DML statements, all executed successfully. - - Response: 5 [ResultSet][google.spanner.v1.ResultSet] messages, - with the status ``OK``. - - Example 2: - - - Request: 5 DML statements. The third statement has a syntax - error. - - Response: 2 [ResultSet][google.spanner.v1.ResultSet] messages, - and a syntax error (``INVALID_ARGUMENT``) status. The number of - [ResultSet][google.spanner.v1.ResultSet] messages indicates that - the third statement failed, and the fourth and fifth statements - were not executed. - - Attributes: - result_sets (Sequence[google.cloud.spanner_v1.types.ResultSet]): - One [ResultSet][google.spanner.v1.ResultSet] for each - statement in the request that ran successfully, in the same - order as the statements in the request. Each - [ResultSet][google.spanner.v1.ResultSet] does not contain - any rows. The - [ResultSetStats][google.spanner.v1.ResultSetStats] in each - [ResultSet][google.spanner.v1.ResultSet] contain the number - of rows modified by the statement. - - Only the first [ResultSet][google.spanner.v1.ResultSet] in - the response contains valid - [ResultSetMetadata][google.spanner.v1.ResultSetMetadata]. - status (google.rpc.status_pb2.Status): - If all DML statements are executed successfully, the status - is ``OK``. Otherwise, the error status of the first failed - statement. - """ - - result_sets = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=result_set.ResultSet, - ) - status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -class PartitionOptions(proto.Message): - r"""Options for a PartitionQueryRequest and - PartitionReadRequest. 
- - Attributes: - partition_size_bytes (int): - **Note:** This hint is currently ignored by PartitionQuery - and PartitionRead requests. - - The desired data size for each partition generated. The - default for this option is currently 1 GiB. This is only a - hint. The actual size of each partition may be smaller or - larger than this size request. - max_partitions (int): - **Note:** This hint is currently ignored by PartitionQuery - and PartitionRead requests. - - The desired maximum number of partitions to return. For - example, this may be set to the number of workers available. - The default for this option is currently 10,000. The maximum - value is currently 200,000. This is only a hint. The actual - number of partitions returned may be smaller or larger than - this maximum count request. - """ - - partition_size_bytes = proto.Field( - proto.INT64, - number=1, - ) - max_partitions = proto.Field( - proto.INT64, - number=2, - ) - - -class PartitionQueryRequest(proto.Message): - r"""The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - - Attributes: - session (str): - Required. The session used to create the - partitions. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - Read only snapshot transactions are - supported, read/write and single use - transactions are not. - sql (str): - Required. The query request to generate partitions for. The - request will fail if the query is not root partitionable. - The query plan of a root partitionable query has a single - distributed union operator. A distributed union operator - conceptually divides one or more tables into multiple - splits, remotely evaluates a subquery independently on each - split, and then unions all results. - - This must not contain DML commands, such as INSERT, UPDATE, - or DELETE. Use - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - with a PartitionedDml transaction for large, - partition-friendly DML operations. 
- params (google.protobuf.struct_pb2.Struct): - Parameter names and values that bind to placeholders in the - SQL string. - - A parameter placeholder consists of the ``@`` character - followed by the parameter name (for example, - ``@firstName``). Parameter names can contain letters, - numbers, and underscores. - - Parameters can appear anywhere that a literal value is - expected. The same parameter name can be used more than - once, for example: - - ``"WHERE id > @msg_id AND id < @msg_id + 100"`` - - It is an error to execute a SQL statement with unbound - parameters. - param_types (Sequence[google.cloud.spanner_v1.types.PartitionQueryRequest.ParamTypesEntry]): - It is not always possible for Cloud Spanner to infer the - right SQL type from a JSON value. For example, values of - type ``BYTES`` and values of type ``STRING`` both appear in - [params][google.spanner.v1.PartitionQueryRequest.params] as - JSON strings. - - In these cases, ``param_types`` can be used to specify the - exact SQL type for some or all of the SQL query parameters. - See the definition of [Type][google.spanner.v1.Type] for - more information about SQL types. - partition_options (google.cloud.spanner_v1.types.PartitionOptions): - Additional options that affect how many - partitions are created. - """ - - session = proto.Field( - proto.STRING, - number=1, - ) - transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - sql = proto.Field( - proto.STRING, - number=3, - ) - params = proto.Field( - proto.MESSAGE, - number=4, - message=struct_pb2.Struct, - ) - param_types = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=5, - message=gs_type.Type, - ) - partition_options = proto.Field( - proto.MESSAGE, - number=6, - message='PartitionOptions', - ) - - -class PartitionReadRequest(proto.Message): - r"""The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - Attributes: - session (str): - Required. 
The session used to create the - partitions. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - Read only snapshot transactions are - supported, read/write and single use - transactions are not. - table (str): - Required. The name of the table in the - database to be read. - index (str): - If non-empty, the name of an index on - [table][google.spanner.v1.PartitionReadRequest.table]. This - index is used instead of the table primary key when - interpreting - [key_set][google.spanner.v1.PartitionReadRequest.key_set] - and sorting result rows. See - [key_set][google.spanner.v1.PartitionReadRequest.key_set] - for further information. - columns (Sequence[str]): - The columns of - [table][google.spanner.v1.PartitionReadRequest.table] to be - returned for each row matching this request. - key_set (google.cloud.spanner_v1.types.KeySet): - Required. ``key_set`` identifies the rows to be yielded. - ``key_set`` names the primary keys of the rows in - [table][google.spanner.v1.PartitionReadRequest.table] to be - yielded, unless - [index][google.spanner.v1.PartitionReadRequest.index] is - present. If - [index][google.spanner.v1.PartitionReadRequest.index] is - present, then - [key_set][google.spanner.v1.PartitionReadRequest.key_set] - instead names index keys in - [index][google.spanner.v1.PartitionReadRequest.index]. - - It is not an error for the ``key_set`` to name rows that do - not exist in the database. Read yields nothing for - nonexistent rows. - partition_options (google.cloud.spanner_v1.types.PartitionOptions): - Additional options that affect how many - partitions are created. 
- """ - - session = proto.Field( - proto.STRING, - number=1, - ) - transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - table = proto.Field( - proto.STRING, - number=3, - ) - index = proto.Field( - proto.STRING, - number=4, - ) - columns = proto.RepeatedField( - proto.STRING, - number=5, - ) - key_set = proto.Field( - proto.MESSAGE, - number=6, - message=keys.KeySet, - ) - partition_options = proto.Field( - proto.MESSAGE, - number=9, - message='PartitionOptions', - ) - - -class Partition(proto.Message): - r"""Information returned for each partition returned in a - PartitionResponse. - - Attributes: - partition_token (bytes): - This token can be passed to Read, - StreamingRead, ExecuteSql, or - ExecuteStreamingSql requests to restrict the - results to those identified by this partition - token. - """ - - partition_token = proto.Field( - proto.BYTES, - number=1, - ) - - -class PartitionResponse(proto.Message): - r"""The response for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - Attributes: - partitions (Sequence[google.cloud.spanner_v1.types.Partition]): - Partitions created by this request. - transaction (google.cloud.spanner_v1.types.Transaction): - Transaction created by this request. - """ - - partitions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Partition', - ) - transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.Transaction, - ) - - -class ReadRequest(proto.Message): - r"""The request for [Read][google.spanner.v1.Spanner.Read] and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - - Attributes: - session (str): - Required. The session in which the read - should be performed. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - The transaction to use. If none is provided, - the default is a temporary read-only transaction - with strong concurrency. 
- table (str): - Required. The name of the table in the - database to be read. - index (str): - If non-empty, the name of an index on - [table][google.spanner.v1.ReadRequest.table]. This index is - used instead of the table primary key when interpreting - [key_set][google.spanner.v1.ReadRequest.key_set] and sorting - result rows. See - [key_set][google.spanner.v1.ReadRequest.key_set] for further - information. - columns (Sequence[str]): - Required. The columns of - [table][google.spanner.v1.ReadRequest.table] to be returned - for each row matching this request. - key_set (google.cloud.spanner_v1.types.KeySet): - Required. ``key_set`` identifies the rows to be yielded. - ``key_set`` names the primary keys of the rows in - [table][google.spanner.v1.ReadRequest.table] to be yielded, - unless [index][google.spanner.v1.ReadRequest.index] is - present. If [index][google.spanner.v1.ReadRequest.index] is - present, then - [key_set][google.spanner.v1.ReadRequest.key_set] instead - names index keys in - [index][google.spanner.v1.ReadRequest.index]. - - If the - [partition_token][google.spanner.v1.ReadRequest.partition_token] - field is empty, rows are yielded in table primary key order - (if [index][google.spanner.v1.ReadRequest.index] is empty) - or index key order (if - [index][google.spanner.v1.ReadRequest.index] is non-empty). - If the - [partition_token][google.spanner.v1.ReadRequest.partition_token] - field is not empty, rows will be yielded in an unspecified - order. - - It is not an error for the ``key_set`` to name rows that do - not exist in the database. Read yields nothing for - nonexistent rows. - limit (int): - If greater than zero, only the first ``limit`` rows are - yielded. If ``limit`` is zero, the default is no limit. A - limit cannot be specified if ``partition_token`` is set. 
- resume_token (bytes): - If this request is resuming a previously interrupted read, - ``resume_token`` should be copied from the last - [PartialResultSet][google.spanner.v1.PartialResultSet] - yielded before the interruption. Doing this enables the new - read to resume where the last read left off. The rest of the - request parameters must exactly match the request that - yielded this token. - partition_token (bytes): - If present, results will be restricted to the specified - partition previously created using PartitionRead(). There - must be an exact match for the values of fields common to - this message and the PartitionReadRequest message used to - create this partition_token. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - """ - - session = proto.Field( - proto.STRING, - number=1, - ) - transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - table = proto.Field( - proto.STRING, - number=3, - ) - index = proto.Field( - proto.STRING, - number=4, - ) - columns = proto.RepeatedField( - proto.STRING, - number=5, - ) - key_set = proto.Field( - proto.MESSAGE, - number=6, - message=keys.KeySet, - ) - limit = proto.Field( - proto.INT64, - number=8, - ) - resume_token = proto.Field( - proto.BYTES, - number=9, - ) - partition_token = proto.Field( - proto.BYTES, - number=10, - ) - request_options = proto.Field( - proto.MESSAGE, - number=11, - message='RequestOptions', - ) - - -class BeginTransactionRequest(proto.Message): - r"""The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - - Attributes: - session (str): - Required. The session in which the - transaction runs. - options (google.cloud.spanner_v1.types.TransactionOptions): - Required. Options for the new transaction. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. Priority is ignored for - this request. 
Setting the priority in this request_options - struct will not do anything. To set the priority for a - transaction, set it on the reads and writes that are part of - this transaction instead. - """ - - session = proto.Field( - proto.STRING, - number=1, - ) - options = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionOptions, - ) - request_options = proto.Field( - proto.MESSAGE, - number=3, - message='RequestOptions', - ) - - -class CommitRequest(proto.Message): - r"""The request for [Commit][google.spanner.v1.Spanner.Commit]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - session (str): - Required. The session in which the - transaction to be committed is running. - transaction_id (bytes): - Commit a previously-started transaction. - This field is a member of `oneof`_ ``transaction``. - single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): - Execute mutations in a temporary transaction. Note that - unlike commit of a previously-started transaction, commit - with a temporary transaction is non-idempotent. That is, if - the ``CommitRequest`` is sent to Cloud Spanner more than - once (for instance, due to retries in the application, or in - the transport library), it is possible that the mutations - are executed more than once. If this is undesirable, use - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] - and [Commit][google.spanner.v1.Spanner.Commit] instead. - This field is a member of `oneof`_ ``transaction``. - mutations (Sequence[google.cloud.spanner_v1.types.Mutation]): - The mutations to be executed when this - transaction commits. 
All mutations are applied - atomically, in the order they appear in this - list. - return_commit_stats (bool): - If ``true``, then statistics related to the transaction will - be included in the - [CommitResponse][google.spanner.v1.CommitResponse.commit_stats]. - Default value is ``false``. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - """ - - session = proto.Field( - proto.STRING, - number=1, - ) - transaction_id = proto.Field( - proto.BYTES, - number=2, - oneof='transaction', - ) - single_use_transaction = proto.Field( - proto.MESSAGE, - number=3, - oneof='transaction', - message=gs_transaction.TransactionOptions, - ) - mutations = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=mutation.Mutation, - ) - return_commit_stats = proto.Field( - proto.BOOL, - number=5, - ) - request_options = proto.Field( - proto.MESSAGE, - number=6, - message='RequestOptions', - ) - - -class RollbackRequest(proto.Message): - r"""The request for [Rollback][google.spanner.v1.Spanner.Rollback]. - - Attributes: - session (str): - Required. The session in which the - transaction to roll back is running. - transaction_id (bytes): - Required. The transaction to roll back. - """ - - session = proto.Field( - proto.STRING, - number=1, - ) - transaction_id = proto.Field( - proto.BYTES, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py deleted file mode 100644 index 31d6f26bdb..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py +++ /dev/null @@ -1,596 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'TransactionOptions', - 'Transaction', - 'TransactionSelector', - }, -) - - -class TransactionOptions(proto.Message): - r"""Transactions - ============ - - Each session can have at most one active transaction at a time (note - that standalone reads and queries use a transaction internally and - do count towards the one transaction limit). After the active - transaction is completed, the session can immediately be re-used for - the next transaction. It is not necessary to create a new session - for each transaction. - - Transaction Modes - ================= - - Cloud Spanner supports three transaction modes: - - 1. Locking read-write. This type of transaction is the only way to - write data into Cloud Spanner. These transactions rely on - pessimistic locking and, if necessary, two-phase commit. Locking - read-write transactions may abort, requiring the application to - retry. - - 2. Snapshot read-only. This transaction type provides guaranteed - consistency across several reads, but does not allow writes. - Snapshot read-only transactions can be configured to read at - timestamps in the past. Snapshot read-only transactions do not - need to be committed. - - 3. Partitioned DML. This type of transaction is used to execute a - single Partitioned DML statement. 
Partitioned DML partitions the - key space and runs the DML statement over each partition in - parallel using separate, internal transactions that commit - independently. Partitioned DML transactions do not need to be - committed. - - For transactions that only read, snapshot read-only transactions - provide simpler semantics and are almost always faster. In - particular, read-only transactions do not take locks, so they do not - conflict with read-write transactions. As a consequence of not - taking locks, they also do not abort, so retry loops are not needed. - - Transactions may only read/write data in a single database. They - may, however, read/write data in different tables within that - database. - - Locking Read-Write Transactions - ------------------------------- - - Locking transactions may be used to atomically read-modify-write - data anywhere in a database. This type of transaction is externally - consistent. - - Clients should attempt to minimize the amount of time a transaction - is active. Faster transactions commit with higher probability and - cause less contention. Cloud Spanner attempts to keep read locks - active as long as the transaction continues to do reads, and the - transaction has not been terminated by - [Commit][google.spanner.v1.Spanner.Commit] or - [Rollback][google.spanner.v1.Spanner.Rollback]. Long periods of - inactivity at the client may cause Cloud Spanner to release a - transaction's locks and abort it. - - Conceptually, a read-write transaction consists of zero or more - reads or SQL statements followed by - [Commit][google.spanner.v1.Spanner.Commit]. At any time before - [Commit][google.spanner.v1.Spanner.Commit], the client can send a - [Rollback][google.spanner.v1.Spanner.Rollback] request to abort the - transaction. - - Semantics - --------- - - Cloud Spanner can commit the transaction if all read locks it - acquired are still valid at commit time, and it is able to acquire - write locks for all writes. 
Cloud Spanner can abort the transaction - for any reason. If a commit attempt returns ``ABORTED``, Cloud - Spanner guarantees that the transaction has not modified any user - data in Cloud Spanner. - - Unless the transaction commits, Cloud Spanner makes no guarantees - about how long the transaction's locks were held for. It is an error - to use Cloud Spanner locks for any sort of mutual exclusion other - than between Cloud Spanner transactions themselves. - - Retrying Aborted Transactions - ----------------------------- - - When a transaction aborts, the application can choose to retry the - whole transaction again. To maximize the chances of successfully - committing the retry, the client should execute the retry in the - same session as the original attempt. The original session's lock - priority increases with each consecutive abort, meaning that each - attempt has a slightly better chance of success than the previous. - - Under some circumstances (e.g., many transactions attempting to - modify the same row(s)), a transaction can abort many times in a - short period before successfully committing. Thus, it is not a good - idea to cap the number of retries a transaction can attempt; - instead, it is better to limit the total amount of wall time spent - retrying. - - Idle Transactions - ----------------- - - A transaction is considered idle if it has no outstanding reads or - SQL queries and has not started a read or SQL query within the last - 10 seconds. Idle transactions can be aborted by Cloud Spanner so - that they don't hold on to locks indefinitely. In that case, the - commit will fail with error ``ABORTED``. - - If this behavior is undesirable, periodically executing a simple SQL - query in the transaction (e.g., ``SELECT 1``) prevents the - transaction from becoming idle. 
- - Snapshot Read-Only Transactions - ------------------------------- - - Snapshot read-only transactions provides a simpler method than - locking read-write transactions for doing several consistent reads. - However, this type of transaction does not support writes. - - Snapshot transactions do not take locks. Instead, they work by - choosing a Cloud Spanner timestamp, then executing all reads at that - timestamp. Since they do not acquire locks, they do not block - concurrent read-write transactions. - - Unlike locking read-write transactions, snapshot read-only - transactions never abort. They can fail if the chosen read timestamp - is garbage collected; however, the default garbage collection policy - is generous enough that most applications do not need to worry about - this in practice. - - Snapshot read-only transactions do not need to call - [Commit][google.spanner.v1.Spanner.Commit] or - [Rollback][google.spanner.v1.Spanner.Rollback] (and in fact are not - permitted to do so). - - To execute a snapshot transaction, the client specifies a timestamp - bound, which tells Cloud Spanner how to choose a read timestamp. - - The types of timestamp bound are: - - - Strong (the default). - - Bounded staleness. - - Exact staleness. - - If the Cloud Spanner database to be read is geographically - distributed, stale read-only transactions can execute more quickly - than strong or read-write transaction, because they are able to - execute far from the leader replica. - - Each type of timestamp bound is discussed in detail below. - - Strong - ------ - - Strong reads are guaranteed to see the effects of all transactions - that have committed before the start of the read. Furthermore, all - rows yielded by a single read are consistent with each other -- if - any part of the read observes a transaction, all parts of the read - see the transaction. 
- - Strong reads are not repeatable: two consecutive strong read-only - transactions might return inconsistent results if there are - concurrent writes. If consistency across reads is required, the - reads should be executed within a transaction or at an exact read - timestamp. - - See - [TransactionOptions.ReadOnly.strong][google.spanner.v1.TransactionOptions.ReadOnly.strong]. - - Exact Staleness - --------------- - - These timestamp bounds execute reads at a user-specified timestamp. - Reads at a timestamp are guaranteed to see a consistent prefix of - the global transaction history: they observe modifications done by - all transactions with a commit timestamp <= the read timestamp, and - observe none of the modifications done by transactions with a larger - commit timestamp. They will block until all conflicting transactions - that may be assigned commit timestamps <= the read timestamp have - finished. - - The timestamp can either be expressed as an absolute Cloud Spanner - commit timestamp or a staleness relative to the current time. - - These modes do not require a "negotiation phase" to pick a - timestamp. As a result, they execute slightly faster than the - equivalent boundedly stale concurrency modes. On the other hand, - boundedly stale reads usually return fresher results. - - See - [TransactionOptions.ReadOnly.read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.read_timestamp] - and - [TransactionOptions.ReadOnly.exact_staleness][google.spanner.v1.TransactionOptions.ReadOnly.exact_staleness]. - - Bounded Staleness - ----------------- - - Bounded staleness modes allow Cloud Spanner to pick the read - timestamp, subject to a user-provided staleness bound. Cloud Spanner - chooses the newest timestamp within the staleness bound that allows - execution of the reads at the closest available replica without - blocking. 
- - All rows yielded are consistent with each other -- if any part of - the read observes a transaction, all parts of the read see the - transaction. Boundedly stale reads are not repeatable: two stale - reads, even if they use the same staleness bound, can execute at - different timestamps and thus return inconsistent results. - - Boundedly stale reads execute in two phases: the first phase - negotiates a timestamp among all replicas needed to serve the read. - In the second phase, reads are executed at the negotiated timestamp. - - As a result of the two phase execution, bounded staleness reads are - usually a little slower than comparable exact staleness reads. - However, they are typically able to return fresher results, and are - more likely to execute at the closest replica. - - Because the timestamp negotiation requires up-front knowledge of - which rows will be read, it can only be used with single-use - read-only transactions. - - See - [TransactionOptions.ReadOnly.max_staleness][google.spanner.v1.TransactionOptions.ReadOnly.max_staleness] - and - [TransactionOptions.ReadOnly.min_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.min_read_timestamp]. - - Old Read Timestamps and Garbage Collection - ------------------------------------------ - - Cloud Spanner continuously garbage collects deleted and overwritten - data in the background to reclaim storage space. This process is - known as "version GC". By default, version GC reclaims versions - after they are one hour old. Because of this, Cloud Spanner cannot - perform reads at read timestamps more than one hour in the past. - This restriction also applies to in-progress reads and/or SQL - queries whose timestamp become too old while executing. Reads and - SQL queries with too-old read timestamps fail with the error - ``FAILED_PRECONDITION``. 
- - Partitioned DML Transactions - ---------------------------- - - Partitioned DML transactions are used to execute DML statements with - a different execution strategy that provides different, and often - better, scalability properties for large, table-wide operations than - DML in a ReadWrite transaction. Smaller scoped statements, such as - an OLTP workload, should prefer using ReadWrite transactions. - - Partitioned DML partitions the keyspace and runs the DML statement - on each partition in separate, internal transactions. These - transactions commit automatically when complete, and run - independently from one another. - - To reduce lock contention, this execution strategy only acquires - read locks on rows that match the WHERE clause of the statement. - Additionally, the smaller per-partition transactions hold locks for - less time. - - That said, Partitioned DML is not a drop-in replacement for standard - DML used in ReadWrite transactions. - - - The DML statement must be fully-partitionable. Specifically, the - statement must be expressible as the union of many statements - which each access only a single row of the table. - - - The statement is not applied atomically to all rows of the table. - Rather, the statement is applied atomically to partitions of the - table, in independent transactions. Secondary index rows are - updated atomically with the base table rows. - - - Partitioned DML does not guarantee exactly-once execution - semantics against a partition. The statement will be applied at - least once to each partition. It is strongly recommended that the - DML statement should be idempotent to avoid unexpected results. - For instance, it is potentially dangerous to run a statement such - as ``UPDATE table SET column = column + 1`` as it could be run - multiple times against some rows. - - - The partitions are committed automatically - there is no support - for Commit or Rollback. 
If the call returns an error, or if the - client issuing the ExecuteSql call dies, it is possible that some - rows had the statement executed on them successfully. It is also - possible that statement was never executed against other rows. - - - Partitioned DML transactions may only contain the execution of a - single DML statement via ExecuteSql or ExecuteStreamingSql. - - - If any error is encountered during the execution of the - partitioned DML operation (for instance, a UNIQUE INDEX - violation, division by zero, or a value that cannot be stored due - to schema constraints), then the operation is stopped at that - point and an error is returned. It is possible that at this - point, some partitions have been committed (or even committed - multiple times), and other partitions have not been run at all. - - Given the above, Partitioned DML is good fit for large, - database-wide, operations that are idempotent, such as deleting old - rows from a very large table. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_write (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite): - Transaction may write. - - Authorization to begin a read-write transaction requires - ``spanner.databases.beginOrRollbackReadWriteTransaction`` - permission on the ``session`` resource. - This field is a member of `oneof`_ ``mode``. - partitioned_dml (google.cloud.spanner_v1.types.TransactionOptions.PartitionedDml): - Partitioned DML transaction. - - Authorization to begin a Partitioned DML transaction - requires - ``spanner.databases.beginPartitionedDmlTransaction`` - permission on the ``session`` resource. - This field is a member of `oneof`_ ``mode``. 
- read_only (google.cloud.spanner_v1.types.TransactionOptions.ReadOnly): - Transaction will not write. - - Authorization to begin a read-only transaction requires - ``spanner.databases.beginReadOnlyTransaction`` permission on - the ``session`` resource. - This field is a member of `oneof`_ ``mode``. - """ - - class ReadWrite(proto.Message): - r"""Message type to initiate a read-write transaction. Currently - this transaction type has no options. - - """ - - class PartitionedDml(proto.Message): - r"""Message type to initiate a Partitioned DML transaction. - """ - - class ReadOnly(proto.Message): - r"""Message type to initiate a read-only transaction. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - strong (bool): - Read at a timestamp where all previously - committed transactions are visible. - This field is a member of `oneof`_ ``timestamp_bound``. - min_read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Executes all reads at a timestamp >= ``min_read_timestamp``. - - This is useful for requesting fresher data than some - previous read, or data that is fresh enough to observe the - effects of some previously committed transaction whose - timestamp is known. - - Note that this option can only be used in single-use - transactions. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. - This field is a member of `oneof`_ ``timestamp_bound``. - max_staleness (google.protobuf.duration_pb2.Duration): - Read data at a timestamp >= ``NOW - max_staleness`` seconds. - Guarantees that all writes that have committed more than the - specified number of seconds ago are visible. 
Because Cloud - Spanner chooses the exact timestamp, this mode works even if - the client's local clock is substantially skewed from Cloud - Spanner commit timestamps. - - Useful for reading the freshest data available at a nearby - replica, while bounding the possible staleness if the local - replica has fallen behind. - - Note that this option can only be used in single-use - transactions. - This field is a member of `oneof`_ ``timestamp_bound``. - read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Executes all reads at the given timestamp. Unlike other - modes, reads at a specific timestamp are repeatable; the - same read at the same timestamp always returns the same - data. If the timestamp is in the future, the read will block - until the specified timestamp, modulo the read's deadline. - - Useful for large scale consistent reads such as mapreduces, - or for coordinating many reads against a consistent snapshot - of the data. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. - This field is a member of `oneof`_ ``timestamp_bound``. - exact_staleness (google.protobuf.duration_pb2.Duration): - Executes all reads at a timestamp that is - ``exact_staleness`` old. The timestamp is chosen soon after - the read is started. - - Guarantees that all writes that have committed more than the - specified number of seconds ago are visible. Because Cloud - Spanner chooses the exact timestamp, this mode works even if - the client's local clock is substantially skewed from Cloud - Spanner commit timestamps. - - Useful for reading at nearby replicas without the - distributed timestamp negotiation overhead of - ``max_staleness``. - This field is a member of `oneof`_ ``timestamp_bound``. - return_read_timestamp (bool): - If true, the Cloud Spanner-selected read timestamp is - included in the [Transaction][google.spanner.v1.Transaction] - message that describes the transaction. 
- """ - - strong = proto.Field( - proto.BOOL, - number=1, - oneof='timestamp_bound', - ) - min_read_timestamp = proto.Field( - proto.MESSAGE, - number=2, - oneof='timestamp_bound', - message=timestamp_pb2.Timestamp, - ) - max_staleness = proto.Field( - proto.MESSAGE, - number=3, - oneof='timestamp_bound', - message=duration_pb2.Duration, - ) - read_timestamp = proto.Field( - proto.MESSAGE, - number=4, - oneof='timestamp_bound', - message=timestamp_pb2.Timestamp, - ) - exact_staleness = proto.Field( - proto.MESSAGE, - number=5, - oneof='timestamp_bound', - message=duration_pb2.Duration, - ) - return_read_timestamp = proto.Field( - proto.BOOL, - number=6, - ) - - read_write = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ReadWrite, - ) - partitioned_dml = proto.Field( - proto.MESSAGE, - number=3, - oneof='mode', - message=PartitionedDml, - ) - read_only = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=ReadOnly, - ) - - -class Transaction(proto.Message): - r"""A transaction. - - Attributes: - id (bytes): - ``id`` may be used to identify the transaction in subsequent - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], - [Commit][google.spanner.v1.Spanner.Commit], or - [Rollback][google.spanner.v1.Spanner.Rollback] calls. - - Single-use read-only transactions do not have IDs, because - single-use transactions do not support multiple requests. - read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - For snapshot read-only transactions, the read timestamp - chosen for the transaction. Not returned by default: see - [TransactionOptions.ReadOnly.return_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.return_read_timestamp]. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. 
- """ - - id = proto.Field( - proto.BYTES, - number=1, - ) - read_timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class TransactionSelector(proto.Message): - r"""This message is used to select the transaction in which a - [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] call runs. - - See [TransactionOptions][google.spanner.v1.TransactionOptions] for - more information about transactions. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - single_use (google.cloud.spanner_v1.types.TransactionOptions): - Execute the read or SQL query in a temporary - transaction. This is the most efficient way to - execute a transaction that consists of a single - SQL query. - This field is a member of `oneof`_ ``selector``. - id (bytes): - Execute the read or SQL query in a - previously-started transaction. - This field is a member of `oneof`_ ``selector``. - begin (google.cloud.spanner_v1.types.TransactionOptions): - Begin a new transaction and execute this read or SQL query - in it. The transaction ID of the new transaction is returned - in - [ResultSetMetadata.transaction][google.spanner.v1.ResultSetMetadata.transaction], - which is a [Transaction][google.spanner.v1.Transaction]. - This field is a member of `oneof`_ ``selector``. 
- """ - - single_use = proto.Field( - proto.MESSAGE, - number=1, - oneof='selector', - message='TransactionOptions', - ) - id = proto.Field( - proto.BYTES, - number=2, - oneof='selector', - ) - begin = proto.Field( - proto.MESSAGE, - number=3, - oneof='selector', - message='TransactionOptions', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py deleted file mode 100644 index 28bbc6fc51..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py +++ /dev/null @@ -1,137 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'TypeCode', - 'Type', - 'StructType', - }, -) - - -class TypeCode(proto.Enum): - r"""``TypeCode`` is used as part of [Type][google.spanner.v1.Type] to - indicate the type of a Cloud Spanner value. - - Each legal value of a type can be encoded to or decoded from a JSON - value, using the encodings described below. All Cloud Spanner values - can be ``null``, regardless of type; ``null``\ s are always encoded - as a JSON ``null``. 
- """ - TYPE_CODE_UNSPECIFIED = 0 - BOOL = 1 - INT64 = 2 - FLOAT64 = 3 - TIMESTAMP = 4 - DATE = 5 - STRING = 6 - BYTES = 7 - ARRAY = 8 - STRUCT = 9 - NUMERIC = 10 - JSON = 11 - - -class Type(proto.Message): - r"""``Type`` indicates the type of a Cloud Spanner value, as might be - stored in a table cell or returned from an SQL query. - - Attributes: - code (google.cloud.spanner_v1.types.TypeCode): - Required. The [TypeCode][google.spanner.v1.TypeCode] for - this type. - array_element_type (google.cloud.spanner_v1.types.Type): - If [code][google.spanner.v1.Type.code] == - [ARRAY][google.spanner.v1.TypeCode.ARRAY], then - ``array_element_type`` is the type of the array elements. - struct_type (google.cloud.spanner_v1.types.StructType): - If [code][google.spanner.v1.Type.code] == - [STRUCT][google.spanner.v1.TypeCode.STRUCT], then - ``struct_type`` provides type information for the struct's - fields. - """ - - code = proto.Field( - proto.ENUM, - number=1, - enum='TypeCode', - ) - array_element_type = proto.Field( - proto.MESSAGE, - number=2, - message='Type', - ) - struct_type = proto.Field( - proto.MESSAGE, - number=3, - message='StructType', - ) - - -class StructType(proto.Message): - r"""``StructType`` defines the fields of a - [STRUCT][google.spanner.v1.TypeCode.STRUCT] type. - - Attributes: - fields (Sequence[google.cloud.spanner_v1.types.StructType.Field]): - The list of fields that make up this struct. Order is - significant, because values of this struct type are - represented as lists, where the order of field values - matches the order of fields in the - [StructType][google.spanner.v1.StructType]. In turn, the - order of fields matches the order of columns in a read - request, or the order of fields in the ``SELECT`` clause of - a query. - """ - - class Field(proto.Message): - r"""Message representing a single field of a struct. - - Attributes: - name (str): - The name of the field. For reads, this is the column name. 
- For SQL queries, it is the column alias (e.g., ``"Word"`` in - the query ``"SELECT 'hello' AS Word"``), or the column name - (e.g., ``"ColName"`` in the query - ``"SELECT ColName FROM Table"``). Some columns might have an - empty name (e.g., ``"SELECT UPPER(ColName)"``). Note that a - query result can contain multiple fields with the same name. - type_ (google.cloud.spanner_v1.types.Type): - The type of the field. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - type_ = proto.Field( - proto.MESSAGE, - number=2, - message='Type', - ) - - fields = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Field, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/mypy.ini b/owl-bot-staging/spanner/v1/mypy.ini deleted file mode 100644 index 4505b48543..0000000000 --- a/owl-bot-staging/spanner/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/spanner/v1/noxfile.py b/owl-bot-staging/spanner/v1/noxfile.py deleted file mode 100644 index be3e3a6a51..0000000000 --- a/owl-bot-staging/spanner/v1/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/spanner_v1/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.9') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.9') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py b/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py deleted file mode 100644 index fec728843e..0000000000 --- a/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py +++ /dev/null @@ -1,190 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class spannerCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_create_sessions': ('database', 'session_count', 'session_template', ), - 'begin_transaction': ('session', 'options', 'request_options', ), - 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'request_options', ), - 'create_session': ('database', 'session', ), - 'delete_session': ('name', ), - 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', ), - 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ), - 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 
'partition_token', 'seqno', 'query_options', 'request_options', ), - 'get_session': ('name', ), - 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), - 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), - 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), - 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ), - 'rollback': ('session', 'transaction_id', ), - 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=spannerCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the spanner client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner/v1/setup.py b/owl-bot-staging/spanner/v1/setup.py deleted file mode 100644 index 2455a6cc88..0000000000 --- a/owl-bot-staging/spanner/v1/setup.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-spanner', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.19.4', - ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/spanner/v1/tests/__init__.py b/owl-bot-staging/spanner/v1/tests/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/spanner/v1/tests/unit/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py deleted file mode 100644 index 489b9aef0d..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py +++ /dev/null @@ -1,4046 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.spanner_v1.services.spanner import SpannerAsyncClient -from google.cloud.spanner_v1.services.spanner import SpannerClient -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.services.spanner import transports -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import keys -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert SpannerClient._get_default_mtls_endpoint(None) is None - assert SpannerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - SpannerClient, - SpannerAsyncClient, -]) -def test_spanner_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'spanner.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.SpannerGrpcTransport, "grpc"), - (transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_spanner_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, 
always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - SpannerClient, - SpannerAsyncClient, -]) -def test_spanner_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'spanner.googleapis.com:443' - - -def test_spanner_client_get_transport_class(): - transport = SpannerClient.get_transport_class() - available_transports = [ - transports.SpannerGrpcTransport, - ] - assert transport in available_transports - - transport = SpannerClient.get_transport_class("grpc") - assert transport == transports.SpannerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(SpannerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerAsyncClient)) -def test_spanner_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc", "true"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (SpannerClient, transports.SpannerGrpcTransport, "grpc", "false"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) 
-@mock.patch.object(SpannerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_spanner_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_spanner_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_spanner_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_spanner_client_client_options_from_dict(): - with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = SpannerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_create_session(transport: str = 'grpc', request_type=spanner.CreateSessionRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session( - name='name_value', - ) - response = client.create_session(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CreateSessionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - - -def test_create_session_from_dict(): - test_create_session(request_type=dict) - - -def test_create_session_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - client.create_session() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CreateSessionRequest() - - -@pytest.mark.asyncio -async def test_create_session_async(transport: str = 'grpc_asyncio', request_type=spanner.CreateSessionRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( - name='name_value', - )) - response = await client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CreateSessionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_session_async_from_dict(): - await test_create_session_async(request_type=dict) - - -def test_create_session_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CreateSessionRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value = spanner.Session() - client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_session_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CreateSessionRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - await client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -def test_create_session_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_session( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - - -def test_create_session_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_session( - spanner.CreateSessionRequest(), - database='database_value', - ) - - -@pytest.mark.asyncio -async def test_create_session_flattened_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_session( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - - -@pytest.mark.asyncio -async def test_create_session_flattened_error_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_session( - spanner.CreateSessionRequest(), - database='database_value', - ) - - -def test_batch_create_sessions(transport: str = 'grpc', request_type=spanner.BatchCreateSessionsRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.BatchCreateSessionsResponse( - ) - response = client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BatchCreateSessionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.BatchCreateSessionsResponse) - - -def test_batch_create_sessions_from_dict(): - test_batch_create_sessions(request_type=dict) - - -def test_batch_create_sessions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - client.batch_create_sessions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BatchCreateSessionsRequest() - - -@pytest.mark.asyncio -async def test_batch_create_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchCreateSessionsRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse( - )) - response = await client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BatchCreateSessionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.BatchCreateSessionsResponse) - - -@pytest.mark.asyncio -async def test_batch_create_sessions_async_from_dict(): - await test_batch_create_sessions_async(request_type=dict) - - -def test_batch_create_sessions_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = spanner.BatchCreateSessionsRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - call.return_value = spanner.BatchCreateSessionsResponse() - client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_create_sessions_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BatchCreateSessionsRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) - await client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -def test_batch_create_sessions_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.BatchCreateSessionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.batch_create_sessions( - database='database_value', - session_count=1420, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - assert args[0].session_count == 1420 - - -def test_batch_create_sessions_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_create_sessions( - spanner.BatchCreateSessionsRequest(), - database='database_value', - session_count=1420, - ) - - -@pytest.mark.asyncio -async def test_batch_create_sessions_flattened_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.BatchCreateSessionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.batch_create_sessions( - database='database_value', - session_count=1420, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - assert args[0].session_count == 1420 - - -@pytest.mark.asyncio -async def test_batch_create_sessions_flattened_error_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_create_sessions( - spanner.BatchCreateSessionsRequest(), - database='database_value', - session_count=1420, - ) - - -def test_get_session(transport: str = 'grpc', request_type=spanner.GetSessionRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session( - name='name_value', - ) - response = client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.GetSessionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - - -def test_get_session_from_dict(): - test_get_session(request_type=dict) - - -def test_get_session_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - client.get_session() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.GetSessionRequest() - - -@pytest.mark.asyncio -async def test_get_session_async(transport: str = 'grpc_asyncio', request_type=spanner.GetSessionRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( - name='name_value', - )) - response = await client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.GetSessionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_session_async_from_dict(): - await test_get_session_async(request_type=dict) - - -def test_get_session_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = spanner.GetSessionRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - call.return_value = spanner.Session() - client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_session_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.GetSessionRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - await client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_session_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = spanner.Session() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_session( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_session_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_session( - spanner.GetSessionRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_session_flattened_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_session( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_session_flattened_error_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_session( - spanner.GetSessionRequest(), - name='name_value', - ) - - -def test_list_sessions(transport: str = 'grpc', request_type=spanner.ListSessionsRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.ListSessionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ListSessionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSessionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_sessions_from_dict(): - test_list_sessions(request_type=dict) - - -def test_list_sessions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - client.list_sessions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ListSessionsRequest() - - -@pytest.mark.asyncio -async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.ListSessionsRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ListSessionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSessionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_sessions_async_from_dict(): - await test_list_sessions_async(request_type=dict) - - -def test_list_sessions_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ListSessionsRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value = spanner.ListSessionsResponse() - client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_sessions_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ListSessionsRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse()) - await client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -def test_list_sessions_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.ListSessionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_sessions( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - - -def test_list_sessions_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_sessions( - spanner.ListSessionsRequest(), - database='database_value', - ) - - -@pytest.mark.asyncio -async def test_list_sessions_flattened_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.ListSessionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_sessions( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - - -@pytest.mark.asyncio -async def test_list_sessions_flattened_error_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_sessions( - spanner.ListSessionsRequest(), - database='database_value', - ) - - -def test_list_sessions_pager(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('database', ''), - )), - ) - pager = client.list_sessions(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, spanner.Session) - for i in results) - -def test_list_sessions_pages(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - pages = list(client.list_sessions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_sessions_async_pager(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_sessions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner.Session) - for i in responses) - -@pytest.mark.asyncio -async def test_list_sessions_async_pages(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_sessions(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_delete_session(transport: str = 'grpc', request_type=spanner.DeleteSessionRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.DeleteSessionRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_session_from_dict(): - test_delete_session(request_type=dict) - - -def test_delete_session_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - client.delete_session() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.DeleteSessionRequest() - - -@pytest.mark.asyncio -async def test_delete_session_async(transport: str = 'grpc_asyncio', request_type=spanner.DeleteSessionRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.DeleteSessionRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_session_async_from_dict(): - await test_delete_session_async(request_type=dict) - - -def test_delete_session_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = spanner.DeleteSessionRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - call.return_value = None - client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_session_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.DeleteSessionRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_session_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_session( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_session_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_session( - spanner.DeleteSessionRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_session_flattened_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_session( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_session_flattened_error_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_session( - spanner.DeleteSessionRequest(), - name='name_value', - ) - - -def test_execute_sql(transport: str = 'grpc', request_type=spanner.ExecuteSqlRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = result_set.ResultSet( - ) - response = client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -def test_execute_sql_from_dict(): - test_execute_sql(request_type=dict) - - -def test_execute_sql_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - client.execute_sql() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest() - - -@pytest.mark.asyncio -async def test_execute_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( - )) - response = await client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -@pytest.mark.asyncio -async def test_execute_sql_async_from_dict(): - await test_execute_sql_async(request_type=dict) - - -def test_execute_sql_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - call.return_value = result_set.ResultSet() - client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_execute_sql_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) - await client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_execute_streaming_sql(transport: str = 'grpc', request_type=spanner.ExecuteSqlRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([result_set.PartialResultSet()]) - response = client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest() - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, result_set.PartialResultSet) - - -def test_execute_streaming_sql_from_dict(): - test_execute_streaming_sql(request_type=dict) - - -def test_execute_streaming_sql_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - client.execute_streaming_sql() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest() - - -@pytest.mark.asyncio -async def test_execute_streaming_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - response = await client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest() - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, result_set.PartialResultSet) - - -@pytest.mark.asyncio -async def test_execute_streaming_sql_async_from_dict(): - await test_execute_streaming_sql_async(request_type=dict) - - -def test_execute_streaming_sql_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value = iter([result_set.PartialResultSet()]) - client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_execute_streaming_sql_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = spanner.ExecuteSqlRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - await client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_execute_batch_dml(transport: str = 'grpc', request_type=spanner.ExecuteBatchDmlRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.ExecuteBatchDmlResponse( - ) - response = client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteBatchDmlRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner.ExecuteBatchDmlResponse) - - -def test_execute_batch_dml_from_dict(): - test_execute_batch_dml(request_type=dict) - - -def test_execute_batch_dml_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - client.execute_batch_dml() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteBatchDmlRequest() - - -@pytest.mark.asyncio -async def test_execute_batch_dml_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteBatchDmlRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse( - )) - response = await client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteBatchDmlRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner.ExecuteBatchDmlResponse) - - -@pytest.mark.asyncio -async def test_execute_batch_dml_async_from_dict(): - await test_execute_batch_dml_async(request_type=dict) - - -def test_execute_batch_dml_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteBatchDmlRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - call.return_value = spanner.ExecuteBatchDmlResponse() - client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_execute_batch_dml_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteBatchDmlRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse()) - await client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_read(transport: str = 'grpc', request_type=spanner.ReadRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = result_set.ResultSet( - ) - response = client.read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -def test_read_from_dict(): - test_read(request_type=dict) - - -def test_read_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.read), - '__call__') as call: - client.read() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest() - - -@pytest.mark.asyncio -async def test_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( - )) - response = await client.read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -@pytest.mark.asyncio -async def test_read_async_from_dict(): - await test_read_async(request_type=dict) - - -def test_read_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - call.return_value = result_set.ResultSet() - client.read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_read_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) - await client.read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_streaming_read(transport: str = 'grpc', request_type=spanner.ReadRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([result_set.PartialResultSet()]) - response = client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest() - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, result_set.PartialResultSet) - - -def test_streaming_read_from_dict(): - test_streaming_read(request_type=dict) - - -def test_streaming_read_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - client.streaming_read() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest() - - -@pytest.mark.asyncio -async def test_streaming_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - response = await client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest() - - # Establish that the response is the type that we expect. 
- message = await response.read() - assert isinstance(message, result_set.PartialResultSet) - - -@pytest.mark.asyncio -async def test_streaming_read_async_from_dict(): - await test_streaming_read_async(request_type=dict) - - -def test_streaming_read_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value = iter([result_set.PartialResultSet()]) - client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_streaming_read_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - await client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_begin_transaction(transport: str = 'grpc', request_type=spanner.BeginTransactionRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transaction.Transaction( - id=b'id_blob', - ) - response = client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BeginTransactionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, transaction.Transaction) - assert response.id == b'id_blob' - - -def test_begin_transaction_from_dict(): - test_begin_transaction(request_type=dict) - - -def test_begin_transaction_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - client.begin_transaction() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BeginTransactionRequest() - - -@pytest.mark.asyncio -async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=spanner.BeginTransactionRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction( - id=b'id_blob', - )) - response = await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BeginTransactionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, transaction.Transaction) - assert response.id == b'id_blob' - - -@pytest.mark.asyncio -async def test_begin_transaction_async_from_dict(): - await test_begin_transaction_async(request_type=dict) - - -def test_begin_transaction_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BeginTransactionRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value = transaction.Transaction() - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_begin_transaction_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BeginTransactionRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction()) - await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_begin_transaction_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = transaction.Transaction() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.begin_transaction( - session='session_value', - options=transaction.TransactionOptions(read_write=None), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].session == 'session_value' - assert args[0].options == transaction.TransactionOptions(read_write=None) - - -def test_begin_transaction_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.begin_transaction( - spanner.BeginTransactionRequest(), - session='session_value', - options=transaction.TransactionOptions(read_write=None), - ) - - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transaction.Transaction() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.begin_transaction( - session='session_value', - options=transaction.TransactionOptions(read_write=None), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].session == 'session_value' - assert args[0].options == transaction.TransactionOptions(read_write=None) - - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_error_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.begin_transaction( - spanner.BeginTransactionRequest(), - session='session_value', - options=transaction.TransactionOptions(read_write=None), - ) - - -def test_commit(transport: str = 'grpc', request_type=spanner.CommitRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = commit_response.CommitResponse( - ) - response = client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CommitRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, commit_response.CommitResponse) - - -def test_commit_from_dict(): - test_commit(request_type=dict) - - -def test_commit_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - client.commit() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CommitRequest() - - -@pytest.mark.asyncio -async def test_commit_async(transport: str = 'grpc_asyncio', request_type=spanner.CommitRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse( - )) - response = await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CommitRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, commit_response.CommitResponse) - - -@pytest.mark.asyncio -async def test_commit_async_from_dict(): - await test_commit_async(request_type=dict) - - -def test_commit_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CommitRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = commit_response.CommitResponse() - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_commit_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CommitRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) - await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_commit_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = commit_response.CommitResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.commit( - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=None), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].session == 'session_value' - assert args[0].mutations == [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] - assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=None) - - -def test_commit_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.commit( - spanner.CommitRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=None), - ) - - -@pytest.mark.asyncio -async def test_commit_flattened_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = commit_response.CommitResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.commit( - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=None), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].session == 'session_value' - assert args[0].mutations == [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] - assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=None) - - -@pytest.mark.asyncio -async def test_commit_flattened_error_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.commit( - spanner.CommitRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=None), - ) - - -def test_rollback(transport: str = 'grpc', request_type=spanner.RollbackRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.rollback(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.RollbackRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_rollback_from_dict(): - test_rollback(request_type=dict) - - -def test_rollback_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - client.rollback() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.RollbackRequest() - - -@pytest.mark.asyncio -async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=spanner.RollbackRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.RollbackRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_rollback_async_from_dict(): - await test_rollback_async(request_type=dict) - - -def test_rollback_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.RollbackRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = None - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rollback_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.RollbackRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_rollback_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rollback( - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].session == 'session_value' - assert args[0].transaction_id == b'transaction_id_blob' - - -def test_rollback_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - spanner.RollbackRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - -@pytest.mark.asyncio -async def test_rollback_flattened_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.rollback( - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].session == 'session_value' - assert args[0].transaction_id == b'transaction_id_blob' - - -@pytest.mark.asyncio -async def test_rollback_flattened_error_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rollback( - spanner.RollbackRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - -def test_partition_query(transport: str = 'grpc', request_type=spanner.PartitionQueryRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.PartitionResponse( - ) - response = client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionQueryRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner.PartitionResponse) - - -def test_partition_query_from_dict(): - test_partition_query(request_type=dict) - - -def test_partition_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - client.partition_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionQueryRequest() - - -@pytest.mark.asyncio -async def test_partition_query_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionQueryRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( - )) - response = await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionQueryRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner.PartitionResponse) - - -@pytest.mark.asyncio -async def test_partition_query_async_from_dict(): - await test_partition_query_async(request_type=dict) - - -def test_partition_query_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionQueryRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = spanner.PartitionResponse() - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_partition_query_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionQueryRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) - await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_partition_read(transport: str = 'grpc', request_type=spanner.PartitionReadRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.PartitionResponse( - ) - response = client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionReadRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.PartitionResponse) - - -def test_partition_read_from_dict(): - test_partition_read(request_type=dict) - - -def test_partition_read_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - client.partition_read() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionReadRequest() - - -@pytest.mark.asyncio -async def test_partition_read_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionReadRequest): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( - )) - response = await client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionReadRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.PartitionResponse) - - -@pytest.mark.asyncio -async def test_partition_read_async_from_dict(): - await test_partition_read_async(request_type=dict) - - -def test_partition_read_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionReadRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - call.return_value = spanner.PartitionResponse() - client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_partition_read_field_headers_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionReadRequest() - - request.session = 'session/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) - await client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session/value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SpannerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SpannerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = SpannerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.SpannerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.SpannerGrpcTransport, - transports.SpannerGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SpannerGrpcTransport, - ) - -def test_spanner_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SpannerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_spanner_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.SpannerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_session', - 'batch_create_sessions', - 'get_session', - 'list_sessions', - 'delete_session', - 'execute_sql', - 'execute_streaming_sql', - 'execute_batch_dml', - 'read', - 'streaming_read', - 'begin_transaction', - 'commit', - 'rollback', - 'partition_query', - 'partition_read', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - -def test_spanner_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SpannerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - 
load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - quota_project_id="octopus", - ) - - -def test_spanner_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SpannerTransport() - adc.assert_called_once() - - -def test_spanner_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SpannerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SpannerGrpcTransport, - transports.SpannerGrpcAsyncIOTransport, - ], -) -def test_spanner_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.data',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.SpannerGrpcTransport, grpc_helpers), - (transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_spanner_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_spanner_host_no_port(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), - ) - assert client.transport._host == 'spanner.googleapis.com:443' - - -def test_spanner_host_with_port(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), - ) - assert client.transport._host == 'spanner.googleapis.com:8000' - -def test_spanner_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.SpannerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_spanner_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SpannerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - 
scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_database_path(): - project = "squid" - instance = "clam" - database = "whelk" - expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - actual = SpannerClient.database_path(project, instance, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - 
"project": "octopus", - "instance": "oyster", - "database": "nudibranch", - } - path = SpannerClient.database_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_database_path(path) - assert expected == actual - -def test_session_path(): - project = "cuttlefish" - instance = "mussel" - database = "winkle" - session = "nautilus" - expected = "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) - actual = SpannerClient.session_path(project, instance, database, session) - assert expected == actual - - -def test_parse_session_path(): - expected = { - "project": "scallop", - "instance": "abalone", - "database": "squid", - "session": "clam", - } - path = SpannerClient.session_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_session_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = SpannerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = SpannerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = SpannerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = SpannerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = SpannerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = SpannerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = SpannerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = SpannerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = SpannerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = SpannerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = SpannerClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = SpannerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: - transport_class = SpannerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = SpannerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() diff --git a/owl-bot-staging/spanner_admin_database/v1/.coveragerc b/owl-bot-staging/spanner_admin_database/v1/.coveragerc deleted file mode 100644 index 689cd57616..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/spanner_admin_database/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in deleted file mode 100644 index 550e337e01..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/spanner_admin_database *.py -recursive-include google/cloud/spanner_admin_database_v1 *.py diff --git a/owl-bot-staging/spanner_admin_database/v1/README.rst b/owl-bot-staging/spanner_admin_database/v1/README.rst deleted file mode 100644 index 12e0ba69de..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Spanner Admin Database API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Spanner Admin Database API. -4. `Setup Authentication.`_ - -.. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py deleted file mode 100644 index 5fb348c41a..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# -# google-cloud-spanner-admin-database documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. 
-project = u"google-cloud-spanner-admin-database" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
-todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. 
-htmlhelp_basename = "google-cloud-spanner-admin-database-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-spanner-admin-database.tex", - u"google-cloud-spanner-admin-database Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). 
-man_pages = [ - ( - master_doc, - "google-cloud-spanner-admin-database", - u"Google Cloud Spanner Admin Database Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-spanner-admin-database", - u"google-cloud-spanner-admin-database Documentation", - author, - "google-cloud-spanner-admin-database", - "GAPIC library for Google Cloud Spanner Admin Database API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst deleted file mode 100644 index e2947e7442..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - spanner_admin_database_v1/services - spanner_admin_database_v1/types diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst deleted file mode 100644 index bd6aab00e4..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst +++ /dev/null @@ -1,10 +0,0 @@ -DatabaseAdmin -------------------------------- - -.. 
automodule:: google.cloud.spanner_admin_database_v1.services.database_admin - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services.rst deleted file mode 100644 index 55e57d8dc0..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner Admin Database v1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - database_admin diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types.rst deleted file mode 100644 index 95e1d7f88b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Spanner Admin Database v1 API -==================================================== - -.. automodule:: google.cloud.spanner_admin_database_v1.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py deleted file mode 100644 index f93f373840..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.cloud.spanner_admin_database_v1.services.database_admin.client import DatabaseAdminClient -from google.cloud.spanner_admin_database_v1.services.database_admin.async_client import DatabaseAdminAsyncClient - -from google.cloud.spanner_admin_database_v1.types.backup import Backup -from google.cloud.spanner_admin_database_v1.types.backup import BackupInfo -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupEncryptionConfig -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupMetadata -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import DeleteBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import GetBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsRequest -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsResponse -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsRequest -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsResponse -from google.cloud.spanner_admin_database_v1.types.backup import UpdateBackupRequest -from google.cloud.spanner_admin_database_v1.types.common import EncryptionConfig -from google.cloud.spanner_admin_database_v1.types.common import EncryptionInfo -from google.cloud.spanner_admin_database_v1.types.common import OperationProgress -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import 
CreateDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import Database -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DropDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import OptimizeRestoredDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseEncryptionConfig -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreInfo -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreSourceType - -__all__ = ('DatabaseAdminClient', - 'DatabaseAdminAsyncClient', - 'Backup', - 'BackupInfo', - 
'CreateBackupEncryptionConfig', - 'CreateBackupMetadata', - 'CreateBackupRequest', - 'DeleteBackupRequest', - 'GetBackupRequest', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'UpdateBackupRequest', - 'EncryptionConfig', - 'EncryptionInfo', - 'OperationProgress', - 'CreateDatabaseMetadata', - 'CreateDatabaseRequest', - 'Database', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'GetDatabaseRequest', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'OptimizeRestoredDatabaseMetadata', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'RestoreDatabaseRequest', - 'RestoreInfo', - 'UpdateDatabaseDdlMetadata', - 'UpdateDatabaseDdlRequest', - 'RestoreSourceType', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed deleted file mode 100644 index 29f334aad6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py deleted file mode 100644 index 1aff0f8660..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.database_admin import DatabaseAdminClient -from .services.database_admin import DatabaseAdminAsyncClient - -from .types.backup import Backup -from .types.backup import BackupInfo -from .types.backup import CreateBackupEncryptionConfig -from .types.backup import CreateBackupMetadata -from .types.backup import CreateBackupRequest -from .types.backup import DeleteBackupRequest -from .types.backup import GetBackupRequest -from .types.backup import ListBackupOperationsRequest -from .types.backup import ListBackupOperationsResponse -from .types.backup import ListBackupsRequest -from .types.backup import ListBackupsResponse -from .types.backup import UpdateBackupRequest -from .types.common import EncryptionConfig -from .types.common import EncryptionInfo -from .types.common import OperationProgress -from .types.spanner_database_admin import CreateDatabaseMetadata -from .types.spanner_database_admin import CreateDatabaseRequest -from .types.spanner_database_admin import Database -from .types.spanner_database_admin import DropDatabaseRequest -from .types.spanner_database_admin import GetDatabaseDdlRequest -from .types.spanner_database_admin import GetDatabaseDdlResponse -from .types.spanner_database_admin import GetDatabaseRequest -from .types.spanner_database_admin import ListDatabaseOperationsRequest -from .types.spanner_database_admin import ListDatabaseOperationsResponse -from .types.spanner_database_admin import ListDatabasesRequest -from .types.spanner_database_admin import ListDatabasesResponse -from .types.spanner_database_admin import 
OptimizeRestoredDatabaseMetadata -from .types.spanner_database_admin import RestoreDatabaseEncryptionConfig -from .types.spanner_database_admin import RestoreDatabaseMetadata -from .types.spanner_database_admin import RestoreDatabaseRequest -from .types.spanner_database_admin import RestoreInfo -from .types.spanner_database_admin import UpdateDatabaseDdlMetadata -from .types.spanner_database_admin import UpdateDatabaseDdlRequest -from .types.spanner_database_admin import RestoreSourceType - -__all__ = ( - 'DatabaseAdminAsyncClient', -'Backup', -'BackupInfo', -'CreateBackupEncryptionConfig', -'CreateBackupMetadata', -'CreateBackupRequest', -'CreateDatabaseMetadata', -'CreateDatabaseRequest', -'Database', -'DatabaseAdminClient', -'DeleteBackupRequest', -'DropDatabaseRequest', -'EncryptionConfig', -'EncryptionInfo', -'GetBackupRequest', -'GetDatabaseDdlRequest', -'GetDatabaseDdlResponse', -'GetDatabaseRequest', -'ListBackupOperationsRequest', -'ListBackupOperationsResponse', -'ListBackupsRequest', -'ListBackupsResponse', -'ListDatabaseOperationsRequest', -'ListDatabaseOperationsResponse', -'ListDatabasesRequest', -'ListDatabasesResponse', -'OperationProgress', -'OptimizeRestoredDatabaseMetadata', -'RestoreDatabaseEncryptionConfig', -'RestoreDatabaseMetadata', -'RestoreDatabaseRequest', -'RestoreInfo', -'RestoreSourceType', -'UpdateBackupRequest', -'UpdateDatabaseDdlMetadata', -'UpdateDatabaseDdlRequest', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json deleted file mode 100644 index 1460097dc3..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json +++ /dev/null @@ -1,193 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": 
"google.cloud.spanner_admin_database_v1", - "protoPackage": "google.spanner.admin.database.v1", - "schema": "1.0", - "services": { - "DatabaseAdmin": { - "clients": { - "grpc": { - "libraryClient": "DatabaseAdminClient", - "rpcs": { - "CreateBackup": { - "methods": [ - "create_backup" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DropDatabase": { - "methods": [ - "drop_database" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetDatabaseDdl": { - "methods": [ - "get_database_ddl" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListBackupOperations": { - "methods": [ - "list_backup_operations" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabaseOperations": { - "methods": [ - "list_database_operations" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateDatabaseDdl": { - "methods": [ - "update_database_ddl" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DatabaseAdminAsyncClient", - "rpcs": { - "CreateBackup": { - "methods": [ - "create_backup" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DropDatabase": { - "methods": [ - "drop_database" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetDatabaseDdl": { - "methods": [ - "get_database_ddl" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListBackupOperations": { - "methods": [ - 
"list_backup_operations" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabaseOperations": { - "methods": [ - "list_database_operations" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateDatabaseDdl": { - "methods": [ - "update_database_ddl" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed deleted file mode 100644 index 29f334aad6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py deleted file mode 100644 index 4de65971c2..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py deleted file mode 100644 index eb34609dc2..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import DatabaseAdminClient -from .async_client import DatabaseAdminAsyncClient - -__all__ = ( - 'DatabaseAdminClient', - 'DatabaseAdminAsyncClient', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py deleted file mode 100644 index 4a66aa471b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ /dev/null @@ -1,1968 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -OptionalRetry = Union[retries.Retry, object] - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_database_v1.services.database_admin import pagers -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import common -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport -from .client import DatabaseAdminClient - - -class DatabaseAdminAsyncClient: - """Cloud Spanner Database Admin API - The Cloud Spanner Database Admin API can be used to create, - drop, and list databases. It also enables updating the schema of - pre-existing databases. It can be also used to create, delete - and list backups for a database and to restore from an existing - backup. 
- """ - - _client: DatabaseAdminClient - - DEFAULT_ENDPOINT = DatabaseAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT - - backup_path = staticmethod(DatabaseAdminClient.backup_path) - parse_backup_path = staticmethod(DatabaseAdminClient.parse_backup_path) - crypto_key_path = staticmethod(DatabaseAdminClient.crypto_key_path) - parse_crypto_key_path = staticmethod(DatabaseAdminClient.parse_crypto_key_path) - crypto_key_version_path = staticmethod(DatabaseAdminClient.crypto_key_version_path) - parse_crypto_key_version_path = staticmethod(DatabaseAdminClient.parse_crypto_key_version_path) - database_path = staticmethod(DatabaseAdminClient.database_path) - parse_database_path = staticmethod(DatabaseAdminClient.parse_database_path) - instance_path = staticmethod(DatabaseAdminClient.instance_path) - parse_instance_path = staticmethod(DatabaseAdminClient.parse_instance_path) - common_billing_account_path = staticmethod(DatabaseAdminClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DatabaseAdminClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DatabaseAdminClient.common_folder_path) - parse_common_folder_path = staticmethod(DatabaseAdminClient.parse_common_folder_path) - common_organization_path = staticmethod(DatabaseAdminClient.common_organization_path) - parse_common_organization_path = staticmethod(DatabaseAdminClient.parse_common_organization_path) - common_project_path = staticmethod(DatabaseAdminClient.common_project_path) - parse_common_project_path = staticmethod(DatabaseAdminClient.parse_common_project_path) - common_location_path = staticmethod(DatabaseAdminClient.common_location_path) - parse_common_location_path = staticmethod(DatabaseAdminClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. 
- - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminAsyncClient: The constructed client. - """ - return DatabaseAdminClient.from_service_account_info.__func__(DatabaseAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminAsyncClient: The constructed client. - """ - return DatabaseAdminClient.from_service_account_file.__func__(DatabaseAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DatabaseAdminTransport: - """Returns the transport used by the client instance. - - Returns: - DatabaseAdminTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(DatabaseAdminClient).get_transport_class, type(DatabaseAdminClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, DatabaseAdminTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the database admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- transport (Union[str, ~.DatabaseAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DatabaseAdminClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_databases(self, - request: Union[spanner_database_admin.ListDatabasesRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDatabasesAsyncPager: - r"""Lists Cloud Spanner databases. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): - The request object. The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - parent (:class:`str`): - Required. 
The instance whose databases should be listed. - Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager: - The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_database_admin.ListDatabasesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_databases, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDatabasesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_database(self, - request: Union[spanner_database_admin.CreateDatabaseRequest, dict] = None, - *, - parent: str = None, - create_statement: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new Cloud Spanner database and starts to prepare it - for serving. The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]): - The request object. The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - parent (:class:`str`): - Required. The name of the instance that will serve the - new database. Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - create_statement (:class:`str`): - Required. 
A ``CREATE DATABASE`` statement, which - specifies the ID of the new database. The database ID - must conform to the regular expression - ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 - characters in length. If the database ID is a reserved - word or if it contains a hyphen, the database ID must be - enclosed in backticks (:literal:`\``). - - This corresponds to the ``create_statement`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, create_statement]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_database_admin.CreateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if create_statement is not None: - request.create_statement = create_statement - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_database, - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def get_database(self, - request: Union[spanner_database_admin.GetDatabaseRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner_database_admin.Database: - r"""Gets the state of a Cloud Spanner database. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]): - The request object. The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - name (:class:`str`): - Required. The name of the requested database. Values are - of the form - ``projects//instances//databases/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.types.Database: - A Cloud Spanner database. - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_database_admin.GetDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_database, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_database_ddl(self, - request: Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict] = None, - *, - database: str = None, - statements: Sequence[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. 
The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``/operations/`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): - The request object. Enqueues the given DDL statements to - be applied, in order but not necessarily all at once, to - the database schema at some point (or points) in the - future. The server checks that the statements are - executable (syntactically valid, name tables that exist, - etc.) before enqueueing them, but they may still fail - upon - later execution (e.g., if a statement from another batch - of statements is applied first and it conflicts in some - way, or if there is some data-related problem like a - `NULL` value in a column to which `NOT NULL` would be - added). If a statement fails, all subsequent statements - in the batch are automatically cancelled. - Each batch of statements is assigned a name which can be - used with the - [Operations][google.longrunning.Operations] API to - monitor progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - database (:class:`str`): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - statements (:class:`Sequence[str]`): - Required. DDL statements to be - applied to the database. - - This corresponds to the ``statements`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - The JSON representation for Empty is empty JSON - object {}. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, statements]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_database_admin.UpdateDatabaseDdlRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if statements: - request.statements.extend(statements) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_database_ddl, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, - ) - - # Done; return the response. - return response - - async def drop_database(self, - request: Union[spanner_database_admin.DropDatabaseRequest, dict] = None, - *, - database: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): - The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - database (:class:`str`): - Required. The database to be dropped. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_database_admin.DropDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.drop_database, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_database_ddl(self, - request: Union[spanner_database_admin.GetDatabaseDdlRequest, dict] = None, - *, - database: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner_database_admin.GetDatabaseDdlResponse: - r"""Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): - The request object. The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. 
- database (:class:`str`): - Required. The database whose schema we wish to get. - Values are of the form - ``projects//instances//databases/`` - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: - The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_database_admin.GetDatabaseDdlRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_database_ddl, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, - *, - resource: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for `SetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. 
Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, - *, - resource: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for `GetIamPolicy` - method. 
- resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. 
- - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for - `TestIamPermissions` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. 
- - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`Sequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_backup(self, - request: Union[gsad_backup.CreateBackupRequest, dict] = None, - *, - parent: str = None, - backup: gsad_backup.Backup = None, - backup_id: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]): - The request object. The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - parent (:class:`str`): - Required. The name of the instance in which the backup - will be created. This must be the same instance that - contains the database the backup will be created from. - The backup will be stored in the location(s) specified - in the instance configuration of this instance. Values - are of the form - ``projects//instances/``. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): - Required. The backup to create. - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (:class:`str`): - Required. The id of the backup to be created. The - ``backup_id`` appended to ``parent`` forms the full - backup name of the form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup, backup_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = gsad_backup.CreateBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if backup is not None: - request.backup = backup - if backup_id is not None: - request.backup_id = backup_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_backup, - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gsad_backup.Backup, - metadata_type=gsad_backup.CreateBackupMetadata, - ) - - # Done; return the response. - return response - - async def get_backup(self, - request: Union[backup.GetBackupRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> backup.Backup: - r"""Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]): - The request object. The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - name (:class:`str`): - Required. Name of the backup. Values are of the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = backup.GetBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_backup, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def update_backup(self, - request: Union[gsad_backup.UpdateBackupRequest, dict] = None, - *, - backup: gsad_backup.Backup = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gsad_backup.Backup: - r"""Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): - The request object. The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): - Required. The backup to update. ``backup.name``, and the - fields to be updated as specified by ``update_mask`` are - required. Other fields are ignored. Update is only - supported for the following fields: - - - ``backup.expire_time``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which fields (e.g. - ``expire_time``) in the Backup resource should be - updated. This mask is relative to the Backup resource, - not to the request message. The field mask must always - be specified; this prevents any future fields from being - erased accidentally by clients that do not know about - them. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = gsad_backup.UpdateBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup is not None: - request.backup = backup - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_backup, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup.name", request.backup.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup(self, - request: Union[backup.DeleteBackupRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. 
- - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): - The request object. The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - name (:class:`str`): - Required. Name of the backup to delete. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = backup.DeleteBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_backup, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_backups(self, - request: Union[backup.ListBackupsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBackupsAsyncPager: - r"""Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): - The request object. The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - parent (:class:`str`): - Required. The instance to list backups from. Values are - of the form ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager: - The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = backup.ListBackupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_backups, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def restore_database(self, - request: Union[spanner_database_admin.RestoreDatabaseRequest, dict] = None, - *, - parent: str = None, - database_id: str = None, - backup: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a new database by restoring from a completed backup. 
The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]): - The request object. The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - parent (:class:`str`): - Required. The name of the instance in which to create - the restored database. This instance must be in the same - project and have the same instance configuration as the - instance containing the source backup. Values are of the - form ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (:class:`str`): - Required. The id of the database to create and restore - to. This database must not already exist. The - ``database_id`` appended to ``parent`` forms the full - database name of the form - ``projects//instances//databases/``. 
- - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (:class:`str`): - Name of the backup from which to restore. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database_id, backup]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_database_admin.RestoreDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database_id is not None: - request.database_id = database_id - if backup is not None: - request.backup = backup - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.restore_database, - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.RestoreDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def list_database_operations(self, - request: Union[spanner_database_admin.ListDatabaseOperationsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDatabaseOperationsAsyncPager: - r"""Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects//instances//databases//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]): - The request object. The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - parent (:class:`str`): - Required. The instance of the database operations. - Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager: - The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_database_admin.ListDatabaseOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_database_operations, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListDatabaseOperationsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_backup_operations(self, - request: Union[backup.ListBackupOperationsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBackupOperationsAsyncPager: - r"""Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]): - The request object. The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - parent (:class:`str`): - Required. The instance of the backup operations. Values - are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager: - The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = backup.ListBackupOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_backup_operations, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupOperationsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-spanner-admin-database", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "DatabaseAdminAsyncClient", -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py deleted file mode 100644 index 1430fa5d4c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ /dev/null @@ -1,2116 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -OptionalRetry = Union[retries.Retry, object] - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_database_v1.services.database_admin import pagers -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import common -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DatabaseAdminGrpcTransport -from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport - - -class DatabaseAdminClientMeta(type): - """Metaclass for the DatabaseAdmin client. 
- - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]] - _transport_registry["grpc"] = DatabaseAdminGrpcTransport - _transport_registry["grpc_asyncio"] = DatabaseAdminGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[DatabaseAdminTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DatabaseAdminClient(metaclass=DatabaseAdminClientMeta): - """Cloud Spanner Database Admin API - The Cloud Spanner Database Admin API can be used to create, - drop, and list databases. It also enables updating the schema of - pre-existing databases. It can be also used to create, delete - and list backups for a database and to restore from an existing - backup. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "spanner.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DatabaseAdminTransport: - """Returns the transport used by the client instance. - - Returns: - DatabaseAdminTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def backup_path(project: str,instance: str,backup: str,) -> str: - """Returns a fully-qualified backup string.""" - return "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) - - @staticmethod - def parse_backup_path(path: str) -> Dict[str,str]: - """Parses a backup path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/backups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str: - """Returns a fully-qualified crypto_key string.""" - return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - - @staticmethod - def parse_crypto_key_path(path: str) -> Dict[str,str]: - """Parses a crypto_key path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def crypto_key_version_path(project: str,location: str,key_ring: str,crypto_key: str,crypto_key_version: str,) -> str: - """Returns a fully-qualified crypto_key_version string.""" - return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) - - @staticmethod - def parse_crypto_key_version_path(path: str) -> Dict[str,str]: - """Parses a crypto_key_version path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)/cryptoKeyVersions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def database_path(project: str,instance: str,database: 
str,) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - - @staticmethod - def parse_database_path(path: str) -> Dict[str,str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def instance_path(project: str,instance: str,) -> str: - """Returns a fully-qualified instance string.""" - return "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - - @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: - """Parses a instance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def 
parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DatabaseAdminTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the database admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, DatabaseAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. 
- (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, DatabaseAdminTransport): - # transport is a DatabaseAdminTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - def list_databases(self, - request: Union[spanner_database_admin.ListDatabasesRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDatabasesPager: - r"""Lists Cloud Spanner databases. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): - The request object. The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - parent (str): - Required. The instance whose databases should be listed. - Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager: - The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_database_admin.ListDatabasesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner_database_admin.ListDatabasesRequest): - request = spanner_database_admin.ListDatabasesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDatabasesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_database(self, - request: Union[spanner_database_admin.CreateDatabaseRequest, dict] = None, - *, - parent: str = None, - create_statement: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a new Cloud Spanner database and starts to prepare it - for serving. 
The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]): - The request object. The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - parent (str): - Required. The name of the instance that will serve the - new database. Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - create_statement (str): - Required. A ``CREATE DATABASE`` statement, which - specifies the ID of the new database. The database ID - must conform to the regular expression - ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 - characters in length. If the database ID is a reserved - word or if it contains a hyphen, the database ID must be - enclosed in backticks (:literal:`\``). - - This corresponds to the ``create_statement`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. 
- - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, create_statement]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_database_admin.CreateDatabaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner_database_admin.CreateDatabaseRequest): - request = spanner_database_admin.CreateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if create_statement is not None: - request.create_statement = create_statement - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. 
- return response - - def get_database(self, - request: Union[spanner_database_admin.GetDatabaseRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner_database_admin.Database: - r"""Gets the state of a Cloud Spanner database. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]): - The request object. The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - name (str): - Required. The name of the requested database. Values are - of the form - ``projects//instances//databases/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.types.Database: - A Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_database_admin.GetDatabaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, spanner_database_admin.GetDatabaseRequest): - request = spanner_database_admin.GetDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_database_ddl(self, - request: Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict] = None, - *, - database: str = None, - statements: Sequence[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``/operations/`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): - The request object. Enqueues the given DDL statements to - be applied, in order but not necessarily all at once, to - the database schema at some point (or points) in the - future. 
The server checks that the statements are - executable (syntactically valid, name tables that exist, - etc.) before enqueueing them, but they may still fail - upon - later execution (e.g., if a statement from another batch - of statements is applied first and it conflicts in some - way, or if there is some data-related problem like a - `NULL` value in a column to which `NOT NULL` would be - added). If a statement fails, all subsequent statements - in the batch are automatically cancelled. - Each batch of statements is assigned a name which can be - used with the - [Operations][google.longrunning.Operations] API to - monitor progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - database (str): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - statements (Sequence[str]): - Required. DDL statements to be - applied to the database. - - This corresponds to the ``statements`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - The JSON representation for Empty is empty JSON - object {}. 
- - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, statements]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_database_admin.UpdateDatabaseDdlRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): - request = spanner_database_admin.UpdateDatabaseDdlRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if statements is not None: - request.statements = statements - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, - ) - - # Done; return the response. 
- return response - - def drop_database(self, - request: Union[spanner_database_admin.DropDatabaseRequest, dict] = None, - *, - database: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): - The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - database (str): - Required. The database to be dropped. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_database_admin.DropDatabaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner_database_admin.DropDatabaseRequest): - request = spanner_database_admin.DropDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.drop_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_database_ddl(self, - request: Union[spanner_database_admin.GetDatabaseDdlRequest, dict] = None, - *, - database: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner_database_admin.GetDatabaseDdlResponse: - r"""Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): - The request object. The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - database (str): - Required. The database whose schema we wish to get. - Values are of the form - ``projects//instances//databases/`` - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: - The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([database]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_database_admin.GetDatabaseDdlRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): - request = spanner_database_admin.GetDatabaseDdlRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, - *, - resource: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on a database or backup resource. 
- Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for `SetIamPolicy` - method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. 
- - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, - *, - resource: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for `GetIamPolicy` - method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. 
- Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for - `TestIamPermissions` method. - resource (str): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (Sequence[str]): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - if resource is not None: - request.resource = resource - if permissions: - request.permissions.extend(permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_backup(self, - request: Union[gsad_backup.CreateBackupRequest, dict] = None, - *, - parent: str = None, - backup: gsad_backup.Backup = None, - backup_id: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. 
- - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]): - The request object. The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - parent (str): - Required. The name of the instance in which the backup - will be created. This must be the same instance that - contains the database the backup will be created from. - The backup will be stored in the location(s) specified - in the instance configuration of this instance. Values - are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to create. - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (str): - Required. The id of the backup to be created. The - ``backup_id`` appended to ``parent`` forms the full - backup name of the form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, backup, backup_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a gsad_backup.CreateBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, gsad_backup.CreateBackupRequest): - request = gsad_backup.CreateBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup is not None: - request.backup = backup - if backup_id is not None: - request.backup_id = backup_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gsad_backup.Backup, - metadata_type=gsad_backup.CreateBackupMetadata, - ) - - # Done; return the response. - return response - - def get_backup(self, - request: Union[backup.GetBackupRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> backup.Backup: - r"""Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. 
- - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]): - The request object. The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - name (str): - Required. Name of the backup. Values are of the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a backup.GetBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, backup.GetBackupRequest): - request = backup.GetBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_backup(self, - request: Union[gsad_backup.UpdateBackupRequest, dict] = None, - *, - backup: gsad_backup.Backup = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gsad_backup.Backup: - r"""Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): - The request object. The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to update. ``backup.name``, and the - fields to be updated as specified by ``update_mask`` are - required. Other fields are ignored. Update is only - supported for the following fields: - - - ``backup.expire_time``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields (e.g. - ``expire_time``) in the Backup resource should be - updated. This mask is relative to the Backup resource, - not to the request message. The field mask must always - be specified; this prevents any future fields from being - erased accidentally by clients that do not know about - them. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a gsad_backup.UpdateBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, gsad_backup.UpdateBackupRequest): - request = gsad_backup.UpdateBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup is not None: - request.backup = backup - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup.name", request.backup.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_backup(self, - request: Union[backup.DeleteBackupRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): - The request object. The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - name (str): - Required. Name of the backup to delete. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a backup.DeleteBackupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, backup.DeleteBackupRequest): - request = backup.DeleteBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_backups(self, - request: Union[backup.ListBackupsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBackupsPager: - r"""Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): - The request object. The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - parent (str): - Required. The instance to list backups from. Values are - of the form ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager: - The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Iterating over this object will yield results and - resolve additional pages automatically. 
- - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a backup.ListBackupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, backup.ListBackupsRequest): - request = backup.ListBackupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBackupsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def restore_database(self, - request: Union[spanner_database_admin.RestoreDatabaseRequest, dict] = None, - *, - parent: str = None, - database_id: str = None, - backup: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]): - The request object. The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - parent (str): - Required. The name of the instance in which to create - the restored database. This instance must be in the same - project and have the same instance configuration as the - instance containing the source backup. Values are of the - form ``projects//instances/``. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (str): - Required. The id of the database to create and restore - to. This database must not already exist. The - ``database_id`` appended to ``parent`` forms the full - database name of the form - ``projects//instances//databases/``. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (str): - Name of the backup from which to restore. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database_id, backup]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_database_admin.RestoreDatabaseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest): - request = spanner_database_admin.RestoreDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database_id is not None: - request.database_id = database_id - if backup is not None: - request.backup = backup - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restore_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.RestoreDatabaseMetadata, - ) - - # Done; return the response. - return response - - def list_database_operations(self, - request: Union[spanner_database_admin.ListDatabaseOperationsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDatabaseOperationsPager: - r"""Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects//instances//databases//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. 
- - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]): - The request object. The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - parent (str): - Required. The instance of the database operations. - Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager: - The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_database_admin.ListDatabaseOperationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest): - request = spanner_database_admin.ListDatabaseOperationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_database_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDatabaseOperationsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_backup_operations(self, - request: Union[backup.ListBackupOperationsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBackupOperationsPager: - r"""Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]): - The request object. 
The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - parent (str): - Required. The instance of the backup operations. Values - are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager: - The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a backup.ListBackupOperationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, backup.ListBackupOperationsRequest): - request = backup.ListBackupOperationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_backup_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBackupOperationsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-spanner-admin-database", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "DatabaseAdminClient", -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py deleted file mode 100644 index cc8cb77f48..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py +++ /dev/null @@ -1,508 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.longrunning import operations_pb2 # type: ignore - - -class ListDatabasesPager: - """A pager for iterating through ``list_databases`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_database_admin.ListDatabasesResponse], - request: spanner_database_admin.ListDatabasesRequest, - response: spanner_database_admin.ListDatabasesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabasesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_database_admin.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_database_admin.Database]: - for page in self.pages: - yield from page.databases - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabasesAsyncPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_database_admin.ListDatabasesResponse]], - request: spanner_database_admin.ListDatabasesRequest, - response: spanner_database_admin.ListDatabasesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = spanner_database_admin.ListDatabasesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[spanner_database_admin.Database]: - async def async_generator(): - async for page in self.pages: - for response in page.databases: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupsPager: - """A pager for iterating through ``list_backups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backups`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackups`` requests and continue to iterate - through the ``backups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., backup.ListBackupsResponse], - request: backup.ListBackupsRequest, - response: backup.ListBackupsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = backup.ListBackupsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backup.ListBackupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backup.Backup]: - for page in self.pages: - yield from page.backups - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupsAsyncPager: - """A pager for iterating through ``list_backups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backups`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackups`` requests and continue to iterate - through the ``backups`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backup.ListBackupsResponse]], - request: backup.ListBackupsRequest, - response: backup.ListBackupsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = backup.ListBackupsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[backup.Backup]: - async def async_generator(): - async for page in self.pages: - for response in page.backups: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseOperationsPager: - """A pager for iterating through ``list_database_operations`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabaseOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_database_admin.ListDatabaseOperationsResponse], - request: spanner_database_admin.ListDatabaseOperationsRequest, - response: spanner_database_admin.ListDatabaseOperationsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_database_admin.ListDatabaseOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseOperationsAsyncPager: - """A pager for iterating through ``list_database_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabaseOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]], - request: spanner_database_admin.ListDatabaseOperationsRequest, - response: spanner_database_admin.ListDatabaseOperationsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupOperationsPager: - """A pager for iterating through ``list_backup_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackupOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backup.ListBackupOperationsResponse], - request: backup.ListBackupOperationsRequest, - response: backup.ListBackupOperationsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = backup.ListBackupOperationsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backup.ListBackupOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupOperationsAsyncPager: - """A pager for iterating through ``list_backup_operations`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackupOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backup.ListBackupOperationsResponse]], - request: backup.ListBackupOperationsRequest, - response: backup.ListBackupOperationsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = backup.ListBackupOperationsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backup.ListBackupOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py deleted file mode 100644 index 9a0047f713..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DatabaseAdminTransport -from .grpc import DatabaseAdminGrpcTransport -from .grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]] -_transport_registry['grpc'] = DatabaseAdminGrpcTransport -_transport_registry['grpc_asyncio'] = DatabaseAdminGrpcAsyncIOTransport - -__all__ = ( - 'DatabaseAdminTransport', - 'DatabaseAdminGrpcTransport', - 'DatabaseAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py deleted file mode 100644 index 5ef2051c71..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ /dev/null @@ -1,467 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-spanner-admin-database', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -class DatabaseAdminTransport(abc.ABC): - """Abstract transport class for DatabaseAdmin.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_databases: gapic_v1.method.wrap_method( - self.list_databases, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_database: gapic_v1.method.wrap_method( - self.create_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database: gapic_v1.method.wrap_method( - self.get_database, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database_ddl: gapic_v1.method.wrap_method( - self.update_database_ddl, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.drop_database: gapic_v1.method.wrap_method( - self.drop_database, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database_ddl: gapic_v1.method.wrap_method( - self.get_database_ddl, - 
default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.create_backup: gapic_v1.method.wrap_method( - self.create_backup, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup: gapic_v1.method.wrap_method( - self.get_backup, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup: gapic_v1.method.wrap_method( - self.update_backup, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup: gapic_v1.method.wrap_method( - self.delete_backup, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - 
default_timeout=3600.0, - client_info=client_info, - ), - self.list_backups: gapic_v1.method.wrap_method( - self.list_backups, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.restore_database: gapic_v1.method.wrap_method( - self.restore_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_operations: gapic_v1.method.wrap_method( - self.list_database_operations, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_operations: gapic_v1.method.wrap_method( - self.list_backup_operations, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - Union[ - spanner_database_admin.ListDatabasesResponse, - Awaitable[spanner_database_admin.ListDatabasesResponse] - ]]: - raise NotImplementedError() - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - Union[ - spanner_database_admin.Database, - Awaitable[spanner_database_admin.Database] - ]]: - raise NotImplementedError() - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - Union[ - spanner_database_admin.GetDatabaseDdlResponse, - Awaitable[spanner_database_admin.GetDatabaseDdlResponse] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - 
[iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - Union[ - backup.Backup, - Awaitable[backup.Backup] - ]]: - raise NotImplementedError() - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - Union[ - gsad_backup.Backup, - Awaitable[gsad_backup.Backup] - ]]: - raise NotImplementedError() - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - Union[ - backup.ListBackupsResponse, - Awaitable[backup.ListBackupsResponse] - ]]: - raise NotImplementedError() - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - Union[ - spanner_database_admin.ListDatabaseOperationsResponse, - Awaitable[spanner_database_admin.ListDatabaseOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - Union[ - backup.ListBackupOperationsResponse, - Awaitable[backup.ListBackupOperationsResponse] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'DatabaseAdminTransport', -) diff --git 
a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py deleted file mode 100644 index bcb77aea14..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ /dev/null @@ -1,802 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO - - -class DatabaseAdminGrpcTransport(DatabaseAdminTransport): - """gRPC backend transport for DatabaseAdmin. - - Cloud Spanner Database Admin API - The Cloud Spanner Database Admin API can be used to create, - drop, and list databases. It also enables updating the schema of - pre-existing databases. It can be also used to create, delete - and list backups for a database and to restore from an existing - backup. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. 
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Sanity check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - spanner_database_admin.ListDatabasesResponse]: - r"""Return a callable for the list databases method over gRPC. - - Lists Cloud Spanner databases. 
- - Returns: - Callable[[~.ListDatabasesRequest], - ~.ListDatabasesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', - request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the create database method over gRPC. - - Creates a new Cloud Spanner database and starts to prepare it - for serving. The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.CreateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_database' not in self._stubs: - self._stubs['create_database'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', - request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_database'] - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - spanner_database_admin.Database]: - r"""Return a callable for the get database method over gRPC. - - Gets the state of a Cloud Spanner database. - - Returns: - Callable[[~.GetDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', - request_serializer=spanner_database_admin.GetDatabaseRequest.serialize, - response_deserializer=spanner_database_admin.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - operations_pb2.Operation]: - r"""Return a callable for the update database ddl method over gRPC. - - Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``/operations/`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. 
- - Returns: - Callable[[~.UpdateDatabaseDdlRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database_ddl' not in self._stubs: - self._stubs['update_database_ddl'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', - request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database_ddl'] - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - empty_pb2.Empty]: - r"""Return a callable for the drop database method over gRPC. - - Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. - - Returns: - Callable[[~.DropDatabaseRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'drop_database' not in self._stubs: - self._stubs['drop_database'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', - request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['drop_database'] - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - spanner_database_admin.GetDatabaseDdlResponse]: - r"""Return a callable for the get database ddl method over gRPC. 
- - Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. - - Returns: - Callable[[~.GetDatabaseDdlRequest], - ~.GetDatabaseDdlResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database_ddl' not in self._stubs: - self._stubs['get_database_ddl'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', - request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, - response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, - ) - return self._stubs['get_database_ddl'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - database or backup resource. 
- - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the create backup method over gRPC. - - Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. 
There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - Returns: - Callable[[~.CreateBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup' not in self._stubs: - self._stubs['create_backup'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', - request_serializer=gsad_backup.CreateBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup'] - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - backup.Backup]: - r"""Return a callable for the get backup method over gRPC. - - Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.GetBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', - request_serializer=backup.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - gsad_backup.Backup]: - r"""Return a callable for the update backup method over gRPC. - - Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. 
- - Returns: - Callable[[~.UpdateBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup' not in self._stubs: - self._stubs['update_backup'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', - request_serializer=gsad_backup.UpdateBackupRequest.serialize, - response_deserializer=gsad_backup.Backup.deserialize, - ) - return self._stubs['update_backup'] - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.DeleteBackupRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', - request_serializer=backup.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup'] - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - backup.ListBackupsResponse]: - r"""Return a callable for the list backups method over gRPC. - - Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. 
- - Returns: - Callable[[~.ListBackupsRequest], - ~.ListBackupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', - request_serializer=backup.ListBackupsRequest.serialize, - response_deserializer=backup.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the restore database method over gRPC. - - Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. 
- - Returns: - Callable[[~.RestoreDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_database' not in self._stubs: - self._stubs['restore_database'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', - request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_database'] - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - spanner_database_admin.ListDatabaseOperationsResponse]: - r"""Return a callable for the list database operations method over gRPC. - - Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects//instances//databases//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - Returns: - Callable[[~.ListDatabaseOperationsRequest], - ~.ListDatabaseOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_database_operations' not in self._stubs: - self._stubs['list_database_operations'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', - request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, - ) - return self._stubs['list_database_operations'] - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - backup.ListBackupOperationsResponse]: - r"""Return a callable for the list backup operations method over gRPC. - - Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - Returns: - Callable[[~.ListBackupOperationsRequest], - ~.ListBackupOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_backup_operations' not in self._stubs: - self._stubs['list_backup_operations'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', - request_serializer=backup.ListBackupOperationsRequest.serialize, - response_deserializer=backup.ListBackupOperationsResponse.deserialize, - ) - return self._stubs['list_backup_operations'] - - def close(self): - self.grpc_channel.close() - -__all__ = ( - 'DatabaseAdminGrpcTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py deleted file mode 100644 index f34545f412..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,806 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import DatabaseAdminGrpcTransport - - -class DatabaseAdminGrpcAsyncIOTransport(DatabaseAdminTransport): - """gRPC AsyncIO backend transport for DatabaseAdmin. - - Cloud Spanner Database Admin API - The Cloud Spanner Database Admin API can be used to create, - drop, and list databases. It also enables updating the schema of - pre-existing databases. It can be also used to create, delete - and list backups for a database and to restore from an existing - backup. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Sanity check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - Awaitable[spanner_database_admin.ListDatabasesResponse]]: - r"""Return a callable for the list databases method over gRPC. - - Lists Cloud Spanner databases. - - Returns: - Callable[[~.ListDatabasesRequest], - Awaitable[~.ListDatabasesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', - request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create database method over gRPC. - - Creates a new Cloud Spanner database and starts to prepare it - for serving. The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.CreateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_database' not in self._stubs: - self._stubs['create_database'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', - request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_database'] - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - Awaitable[spanner_database_admin.Database]]: - r"""Return a callable for the get database method over gRPC. - - Gets the state of a Cloud Spanner database. - - Returns: - Callable[[~.GetDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', - request_serializer=spanner_database_admin.GetDatabaseRequest.serialize, - response_deserializer=spanner_database_admin.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update database ddl method over gRPC. - - Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``/operations/`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. 
- The operation has no response. - - Returns: - Callable[[~.UpdateDatabaseDdlRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database_ddl' not in self._stubs: - self._stubs['update_database_ddl'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', - request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database_ddl'] - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the drop database method over gRPC. - - Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. - - Returns: - Callable[[~.DropDatabaseRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'drop_database' not in self._stubs: - self._stubs['drop_database'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', - request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['drop_database'] - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - Awaitable[spanner_database_admin.GetDatabaseDdlResponse]]: - r"""Return a callable for the get database ddl method over gRPC. - - Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. - - Returns: - Callable[[~.GetDatabaseDdlRequest], - Awaitable[~.GetDatabaseDdlResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database_ddl' not in self._stubs: - self._stubs['get_database_ddl'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', - request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, - response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, - ) - return self._stubs['get_database_ddl'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. 
For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create backup method over gRPC. - - Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - Returns: - Callable[[~.CreateBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_backup' not in self._stubs: - self._stubs['create_backup'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', - request_serializer=gsad_backup.CreateBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup'] - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - Awaitable[backup.Backup]]: - r"""Return a callable for the get backup method over gRPC. - - Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.GetBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', - request_serializer=backup.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - Awaitable[gsad_backup.Backup]]: - r"""Return a callable for the update backup method over gRPC. - - Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.UpdateBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_backup' not in self._stubs: - self._stubs['update_backup'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', - request_serializer=gsad_backup.UpdateBackupRequest.serialize, - response_deserializer=gsad_backup.Backup.deserialize, - ) - return self._stubs['update_backup'] - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.DeleteBackupRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', - request_serializer=backup.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup'] - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - Awaitable[backup.ListBackupsResponse]]: - r"""Return a callable for the list backups method over gRPC. - - Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - Returns: - Callable[[~.ListBackupsRequest], - Awaitable[~.ListBackupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', - request_serializer=backup.ListBackupsRequest.serialize, - response_deserializer=backup.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the restore database method over gRPC. - - Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - Returns: - Callable[[~.RestoreDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'restore_database' not in self._stubs: - self._stubs['restore_database'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', - request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_database'] - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]]: - r"""Return a callable for the list database operations method over gRPC. - - Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects//instances//databases//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - Returns: - Callable[[~.ListDatabaseOperationsRequest], - Awaitable[~.ListDatabaseOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_database_operations' not in self._stubs: - self._stubs['list_database_operations'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', - request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, - ) - return self._stubs['list_database_operations'] - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - Awaitable[backup.ListBackupOperationsResponse]]: - r"""Return a callable for the list backup operations method over gRPC. - - Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - Returns: - Callable[[~.ListBackupOperationsRequest], - Awaitable[~.ListBackupOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_backup_operations' not in self._stubs: - self._stubs['list_backup_operations'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', - request_serializer=backup.ListBackupOperationsRequest.serialize, - response_deserializer=backup.ListBackupOperationsResponse.deserialize, - ) - return self._stubs['list_backup_operations'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'DatabaseAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py deleted file mode 100644 index 3fa15e4383..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .backup import ( - Backup, - BackupInfo, - CreateBackupEncryptionConfig, - CreateBackupMetadata, - CreateBackupRequest, - DeleteBackupRequest, - GetBackupRequest, - ListBackupOperationsRequest, - ListBackupOperationsResponse, - ListBackupsRequest, - ListBackupsResponse, - UpdateBackupRequest, -) -from .common import ( - EncryptionConfig, - EncryptionInfo, - OperationProgress, -) -from .spanner_database_admin import ( - CreateDatabaseMetadata, - CreateDatabaseRequest, - Database, - DropDatabaseRequest, - GetDatabaseDdlRequest, - GetDatabaseDdlResponse, - GetDatabaseRequest, - ListDatabaseOperationsRequest, - ListDatabaseOperationsResponse, - ListDatabasesRequest, - ListDatabasesResponse, - OptimizeRestoredDatabaseMetadata, - RestoreDatabaseEncryptionConfig, - RestoreDatabaseMetadata, - RestoreDatabaseRequest, - RestoreInfo, - UpdateDatabaseDdlMetadata, - UpdateDatabaseDdlRequest, - RestoreSourceType, -) - -__all__ = ( - 'Backup', - 'BackupInfo', - 'CreateBackupEncryptionConfig', - 'CreateBackupMetadata', - 'CreateBackupRequest', - 'DeleteBackupRequest', - 'GetBackupRequest', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'UpdateBackupRequest', - 'EncryptionConfig', - 'EncryptionInfo', - 'OperationProgress', - 'CreateDatabaseMetadata', - 'CreateDatabaseRequest', - 'Database', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'GetDatabaseRequest', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'OptimizeRestoredDatabaseMetadata', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'RestoreDatabaseRequest', - 'RestoreInfo', - 'UpdateDatabaseDdlMetadata', - 'UpdateDatabaseDdlRequest', - 'RestoreSourceType', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py 
b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py deleted file mode 100644 index 0d2469d461..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py +++ /dev/null @@ -1,638 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import common -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'Backup', - 'CreateBackupRequest', - 'CreateBackupMetadata', - 'UpdateBackupRequest', - 'GetBackupRequest', - 'DeleteBackupRequest', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'BackupInfo', - 'CreateBackupEncryptionConfig', - }, -) - - -class Backup(proto.Message): - r"""A backup of a Cloud Spanner database. - - Attributes: - database (str): - Required for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. Name of the database from which this backup was - created. This needs to be in the same instance as the - backup. Values are of the form - ``projects//instances//databases/``. 
- version_time (google.protobuf.timestamp_pb2.Timestamp): - The backup will contain an externally consistent copy of the - database at the timestamp specified by ``version_time``. If - ``version_time`` is not specified, the system will set - ``version_time`` to the ``create_time`` of the backup. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Required for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. The expiration time of the backup, with - microseconds granularity that must be at least 6 hours and - at most 366 days from the time the CreateBackup request is - processed. Once the ``expire_time`` has passed, the backup - is eligible to be automatically deleted by Cloud Spanner to - free the resources used by the backup. - name (str): - Output only for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. Required for the - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup] - operation. - - A globally unique identifier for the backup which cannot be - changed. Values are of the form - ``projects//instances//backups/[a-z][a-z0-9_\-]*[a-z0-9]`` - The final segment of the name must be between 2 and 60 - characters in length. - - The backup is stored in the location(s) specified in the - instance configuration of the instance containing the - backup, identified by the prefix of the backup name of the - form ``projects//instances/``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request is received. If the request does not specify - ``version_time``, the ``version_time`` of the backup will be - equivalent to the ``create_time``. - size_bytes (int): - Output only. Size of the backup in bytes. - state (google.cloud.spanner_admin_database_v1.types.Backup.State): - Output only. The current state of the backup. 
- referencing_databases (Sequence[str]): - Output only. The names of the restored databases that - reference the backup. The database names are of the form - ``projects//instances//databases/``. - Referencing databases may exist in different instances. The - existence of any referencing database prevents the backup - from being deleted. When a restored database from the backup - enters the ``READY`` state, the reference to the backup is - removed. - encryption_info (google.cloud.spanner_admin_database_v1.types.EncryptionInfo): - Output only. The encryption information for - the backup. - """ - class State(proto.Enum): - r"""Indicates the current state of the backup.""" - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - database = proto.Field( - proto.STRING, - number=2, - ) - version_time = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - expire_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - name = proto.Field( - proto.STRING, - number=1, - ) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - size_bytes = proto.Field( - proto.INT64, - number=5, - ) - state = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - referencing_databases = proto.RepeatedField( - proto.STRING, - number=7, - ) - encryption_info = proto.Field( - proto.MESSAGE, - number=8, - message=common.EncryptionInfo, - ) - - -class CreateBackupRequest(proto.Message): - r"""The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - - Attributes: - parent (str): - Required. The name of the instance in which the backup will - be created. This must be the same instance that contains the - database the backup will be created from. The backup will be - stored in the location(s) specified in the instance - configuration of this instance. Values are of the form - ``projects//instances/``. - backup_id (str): - Required. 
The id of the backup to be created. The - ``backup_id`` appended to ``parent`` forms the full backup - name of the form - ``projects//instances//backups/``. - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to create. - encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): - Optional. The encryption configuration used to encrypt the - backup. If this field is not specified, the backup will use - the same encryption configuration as the database by - default, namely - [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] - = ``USE_DATABASE_ENCRYPTION``. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - backup_id = proto.Field( - proto.STRING, - number=2, - ) - backup = proto.Field( - proto.MESSAGE, - number=3, - message='Backup', - ) - encryption_config = proto.Field( - proto.MESSAGE, - number=4, - message='CreateBackupEncryptionConfig', - ) - - -class CreateBackupMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - - Attributes: - name (str): - The name of the backup being created. - database (str): - The name of the database the backup is - created from. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which cancellation of this operation was - received. - [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] - starts asynchronous cancellation on a long-running - operation. The server makes a best effort to cancel the - operation, but success is not guaranteed. 
Clients can use - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - or other methods to check whether the cancellation succeeded - or whether the operation completed despite cancellation. On - successful cancellation, the operation is not deleted; - instead, it becomes an operation with an - [Operation.error][google.longrunning.Operation.error] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - database = proto.Field( - proto.STRING, - number=2, - ) - progress = proto.Field( - proto.MESSAGE, - number=3, - message=common.OperationProgress, - ) - cancel_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateBackupRequest(proto.Message): - r"""The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - - Attributes: - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to update. ``backup.name``, and the - fields to be updated as specified by ``update_mask`` are - required. Other fields are ignored. Update is only supported - for the following fields: - - - ``backup.expire_time``. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields (e.g. - ``expire_time``) in the Backup resource should be updated. - This mask is relative to the Backup resource, not to the - request message. The field mask must always be specified; - this prevents any future fields from being erased - accidentally by clients that do not know about them. - """ - - backup = proto.Field( - proto.MESSAGE, - number=1, - message='Backup', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetBackupRequest(proto.Message): - r"""The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. 
- - Attributes: - name (str): - Required. Name of the backup. Values are of the form - ``projects//instances//backups/``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteBackupRequest(proto.Message): - r"""The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - - Attributes: - name (str): - Required. Name of the backup to delete. Values are of the - form - ``projects//instances//backups/``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupsRequest(proto.Message): - r"""The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Attributes: - parent (str): - Required. The instance to list backups from. Values are of - the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned backups. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [Backup][google.spanner.admin.database.v1.Backup] are - eligible for filtering: - - - ``name`` - - ``database`` - - ``state`` - - ``create_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``expire_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``version_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``size_bytes`` - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic, but you can specify AND, OR, and - NOT logic explicitly. - - Here are a few examples: - - - ``name:Howl`` - The backup's name contains the string - "howl". - - ``database:prod`` - The database's name contains the - string "prod". 
- - ``state:CREATING`` - The backup is pending creation. - - ``state:READY`` - The backup is fully created and ready - for use. - - ``(name:howl) AND (create_time < \"2018-03-28T14:50:00Z\")`` - - The backup name contains the string "howl" and - ``create_time`` of the backup is before - 2018-03-28T14:50:00Z. - - ``expire_time < \"2018-03-28T14:50:00Z\"`` - The backup - ``expire_time`` is before 2018-03-28T14:50:00Z. - - ``size_bytes > 10000000000`` - The backup's size is - greater than 10GB - page_size (int): - Number of backups to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListBackupsResponse.next_page_token] - from a previous - [ListBackupsResponse][google.spanner.admin.database.v1.ListBackupsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupsResponse(proto.Message): - r"""The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Attributes: - backups (Sequence[google.cloud.spanner_admin_database_v1.types.Backup]): - The list of matching backups. Backups returned are ordered - by ``create_time`` in descending order, starting from the - most recent ``create_time``. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups] - call to fetch more of the matching backups. 
- """ - - @property - def raw_page(self): - return self - - backups = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Backup', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class ListBackupOperationsRequest(proto.Message): - r"""The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Attributes: - parent (str): - Required. The instance of the backup operations. Values are - of the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned backup - operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [operation][google.longrunning.Operation] are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. For example, - the type string for - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] - is - ``type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata``. - - ``metadata.`` - any field in metadata.value. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic, but you can specify AND, OR, and - NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. 
- - ``metadata.database:prod`` - The database the backup was - taken from has a name containing the string "prod". - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` - ``(metadata.name:howl) AND`` - ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Returns operations where: - - - The operation's metadata type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - - The backup name contains the string "howl". - - The operation started before 2018-03-28T14:50:00Z. - - The operation resulted in an error. - page_size (int): - Number of operations to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListBackupOperationsResponse.next_page_token] - from a previous - [ListBackupOperationsResponse][google.spanner.admin.database.v1.ListBackupOperationsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupOperationsResponse(proto.Message): - r"""The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Attributes: - operations (Sequence[google.longrunning.operations_pb2.Operation]): - The list of matching backup [long-running - operations][google.longrunning.Operation]. Each operation's - name will be prefixed by the backup's name and the - operation's - [metadata][google.longrunning.Operation.metadata] will be of - type - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. 
- Operations returned include those that are pending or have - completed/failed/canceled within the last 7 days. Operations - returned are ordered by - ``operation.metadata.value.progress.start_time`` in - descending order starting from the most recently started - operation. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations] - call to fetch more of the matching metadata. - """ - - @property - def raw_page(self): - return self - - operations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=operations_pb2.Operation, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class BackupInfo(proto.Message): - r"""Information about a backup. - - Attributes: - backup (str): - Name of the backup. - version_time (google.protobuf.timestamp_pb2.Timestamp): - The backup contains an externally consistent copy of - ``source_database`` at the timestamp specified by - ``version_time``. If the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request did not specify ``version_time``, the - ``version_time`` of the backup is equivalent to the - ``create_time``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request was received. - source_database (str): - Name of the database the backup was created - from. - """ - - backup = proto.Field( - proto.STRING, - number=1, - ) - version_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - create_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - source_database = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateBackupEncryptionConfig(proto.Message): - r"""Encryption configuration for the backup to create. 
- - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig.EncryptionType): - Required. The encryption type of the backup. - kms_key_name (str): - Optional. The Cloud KMS key that will be used to protect the - backup. This field should be set only when - [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] - is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - """ - class EncryptionType(proto.Enum): - r"""Encryption types for the backup.""" - ENCRYPTION_TYPE_UNSPECIFIED = 0 - USE_DATABASE_ENCRYPTION = 1 - GOOGLE_DEFAULT_ENCRYPTION = 2 - CUSTOMER_MANAGED_ENCRYPTION = 3 - - encryption_type = proto.Field( - proto.ENUM, - number=1, - enum=EncryptionType, - ) - kms_key_name = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py deleted file mode 100644 index e1f6168dae..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py +++ /dev/null @@ -1,117 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'OperationProgress', - 'EncryptionConfig', - 'EncryptionInfo', - }, -) - - -class OperationProgress(proto.Message): - r"""Encapsulates progress related information for a Cloud Spanner - long running operation. - - Attributes: - progress_percent (int): - Percent completion of the operation. - Values are between 0 and 100 inclusive. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time the request was received. - end_time (google.protobuf.timestamp_pb2.Timestamp): - If set, the time at which this operation - failed or was completed successfully. - """ - - progress_percent = proto.Field( - proto.INT32, - number=1, - ) - start_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class EncryptionConfig(proto.Message): - r"""Encryption configuration for a Cloud Spanner database. - - Attributes: - kms_key_name (str): - The Cloud KMS key to be used for encrypting and decrypting - the database. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - """ - - kms_key_name = proto.Field( - proto.STRING, - number=2, - ) - - -class EncryptionInfo(proto.Message): - r"""Encryption information for a Cloud Spanner database or - backup. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.EncryptionInfo.Type): - Output only. The type of encryption. - encryption_status (google.rpc.status_pb2.Status): - Output only. If present, the status of a - recent encrypt/decrypt call on underlying data - for this database or backup. Regardless of - status, data is always encrypted at rest. - kms_key_version (str): - Output only. 
A Cloud KMS key version that is - being used to protect the database or backup. - """ - class Type(proto.Enum): - r"""Possible encryption types.""" - TYPE_UNSPECIFIED = 0 - GOOGLE_DEFAULT_ENCRYPTION = 1 - CUSTOMER_MANAGED_ENCRYPTION = 2 - - encryption_type = proto.Field( - proto.ENUM, - number=3, - enum=Type, - ) - encryption_status = proto.Field( - proto.MESSAGE, - number=4, - message=status_pb2.Status, - ) - kms_key_version = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py deleted file mode 100644 index 0da1393227..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py +++ /dev/null @@ -1,826 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import common -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'RestoreSourceType', - 'RestoreInfo', - 'Database', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'CreateDatabaseRequest', - 'CreateDatabaseMetadata', - 'GetDatabaseRequest', - 'UpdateDatabaseDdlRequest', - 'UpdateDatabaseDdlMetadata', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'RestoreDatabaseRequest', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'OptimizeRestoredDatabaseMetadata', - }, -) - - -class RestoreSourceType(proto.Enum): - r"""Indicates the type of the restore source.""" - TYPE_UNSPECIFIED = 0 - BACKUP = 1 - - -class RestoreInfo(proto.Message): - r"""Information about the database restore. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): - The type of the restore source. - backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): - Information about the backup used to restore - the database. The backup may no longer exist. - This field is a member of `oneof`_ ``source_info``. - """ - - source_type = proto.Field( - proto.ENUM, - number=1, - enum='RestoreSourceType', - ) - backup_info = proto.Field( - proto.MESSAGE, - number=2, - oneof='source_info', - message=gsad_backup.BackupInfo, - ) - - -class Database(proto.Message): - r"""A Cloud Spanner database. - - Attributes: - name (str): - Required. The name of the database. 
Values are of the form - ``projects//instances//databases/``, - where ```` is as specified in the - ``CREATE DATABASE`` statement. This name can be passed to - other API methods to identify the database. - state (google.cloud.spanner_admin_database_v1.types.Database.State): - Output only. The current database state. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. If exists, the time at which the - database creation started. - restore_info (google.cloud.spanner_admin_database_v1.types.RestoreInfo): - Output only. Applicable only for restored - databases. Contains information about the - restore source. - encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): - Output only. For databases that are using - customer managed encryption, this field contains - the encryption configuration for the database. - For databases that are using Google default or - other types of encryption, this field is empty. - encryption_info (Sequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]): - Output only. For databases that are using - customer managed encryption, this field contains - the encryption information for the database, - such as encryption state and the Cloud KMS key - versions that are in use. - For databases that are using Google default or - other types of encryption, this field is empty. - - This field is propagated lazily from the - backend. There might be a delay from when a key - version is being used and when it appears in - this field. - version_retention_period (str): - Output only. The period in which Cloud Spanner retains all - versions of data for the database. This is the same as the - value of version_retention_period database option set using - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. - Defaults to 1 hour, if not set. - earliest_version_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
Earliest timestamp at which - older versions of the data can be read. This - value is continuously updated by Cloud Spanner - and becomes stale the moment it is queried. If - you are using this value to recover data, make - sure to account for the time from the moment - when the value is queried to the moment when you - initiate the recovery. - default_leader (str): - Output only. The read-write region which contains the - database's leader replicas. - - This is the same as the value of default_leader database - option set using DatabaseAdmin.CreateDatabase or - DatabaseAdmin.UpdateDatabaseDdl. If not explicitly set, this - is empty. - """ - class State(proto.Enum): - r"""Indicates the current state of the database.""" - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - READY_OPTIMIZING = 3 - - name = proto.Field( - proto.STRING, - number=1, - ) - state = proto.Field( - proto.ENUM, - number=2, - enum=State, - ) - create_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - restore_info = proto.Field( - proto.MESSAGE, - number=4, - message='RestoreInfo', - ) - encryption_config = proto.Field( - proto.MESSAGE, - number=5, - message=common.EncryptionConfig, - ) - encryption_info = proto.RepeatedField( - proto.MESSAGE, - number=8, - message=common.EncryptionInfo, - ) - version_retention_period = proto.Field( - proto.STRING, - number=6, - ) - earliest_version_time = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - default_leader = proto.Field( - proto.STRING, - number=9, - ) - - -class ListDatabasesRequest(proto.Message): - r"""The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Attributes: - parent (str): - Required. The instance whose databases should be listed. - Values are of the form - ``projects//instances/``. - page_size (int): - Number of databases to be returned in the - response. 
If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListDatabasesResponse.next_page_token] - from a previous - [ListDatabasesResponse][google.spanner.admin.database.v1.ListDatabasesResponse]. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDatabasesResponse(proto.Message): - r"""The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Attributes: - databases (Sequence[google.cloud.spanner_admin_database_v1.types.Database]): - Databases that matched the request. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases] - call to fetch more of the matching databases. - """ - - @property - def raw_page(self): - return self - - databases = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Database', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDatabaseRequest(proto.Message): - r"""The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - - Attributes: - parent (str): - Required. The name of the instance that will serve the new - database. Values are of the form - ``projects//instances/``. - create_statement (str): - Required. A ``CREATE DATABASE`` statement, which specifies - the ID of the new database. The database ID must conform to - the regular expression ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be - between 2 and 30 characters in length. If the database ID is - a reserved word or if it contains a hyphen, the database ID - must be enclosed in backticks (:literal:`\``). - extra_statements (Sequence[str]): - Optional. 
A list of DDL statements to run - inside the newly created database. Statements - can create tables, indexes, etc. These - statements execute atomically with the creation - of the database: if there is an error in any - statement, the database is not created. - encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): - Optional. The encryption configuration for - the database. If this field is not specified, - Cloud Spanner will encrypt/decrypt all data at - rest using Google default encryption. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - create_statement = proto.Field( - proto.STRING, - number=2, - ) - extra_statements = proto.RepeatedField( - proto.STRING, - number=3, - ) - encryption_config = proto.Field( - proto.MESSAGE, - number=4, - message=common.EncryptionConfig, - ) - - -class CreateDatabaseMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - - Attributes: - database (str): - The database being created. - """ - - database = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseRequest(proto.Message): - r"""The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - - Attributes: - name (str): - Required. The name of the requested database. Values are of - the form - ``projects//instances//databases/``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateDatabaseDdlRequest(proto.Message): - r"""Enqueues the given DDL statements to be applied, in order but not - necessarily all at once, to the database schema at some point (or - points) in the future. The server checks that the statements are - executable (syntactically valid, name tables that exist, etc.) 
- before enqueueing them, but they may still fail upon later execution - (e.g., if a statement from another batch of statements is applied - first and it conflicts in some way, or if there is some data-related - problem like a ``NULL`` value in a column to which ``NOT NULL`` - would be added). If a statement fails, all subsequent statements in - the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be used with - the [Operations][google.longrunning.Operations] API to monitor - progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - - Attributes: - database (str): - Required. The database to update. - statements (Sequence[str]): - Required. DDL statements to be applied to the - database. - operation_id (str): - If empty, the new update request is assigned an - automatically-generated operation ID. Otherwise, - ``operation_id`` is used to construct the name of the - resulting [Operation][google.longrunning.Operation]. - - Specifying an explicit operation ID simplifies determining - whether the statements were executed in the event that the - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - call is replayed, or the return value is otherwise lost: the - [database][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.database] - and ``operation_id`` fields can be combined to form the - [name][google.longrunning.Operation.name] of the resulting - [longrunning.Operation][google.longrunning.Operation]: - ``/operations/``. - - ``operation_id`` should be unique within the database, and - must be a valid identifier: ``[a-z][a-z0-9_]*``. Note that - automatically-generated operation IDs always begin with an - underscore. If the named operation already exists, - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - returns ``ALREADY_EXISTS``. 
- """ - - database = proto.Field( - proto.STRING, - number=1, - ) - statements = proto.RepeatedField( - proto.STRING, - number=2, - ) - operation_id = proto.Field( - proto.STRING, - number=3, - ) - - -class UpdateDatabaseDdlMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. - - Attributes: - database (str): - The database being modified. - statements (Sequence[str]): - For an update this list contains all the - statements. For an individual statement, this - list contains only that statement. - commit_timestamps (Sequence[google.protobuf.timestamp_pb2.Timestamp]): - Reports the commit timestamps of all statements that have - succeeded so far, where ``commit_timestamps[i]`` is the - commit timestamp for the statement ``statements[i]``. - throttled (bool): - Output only. When true, indicates that the - operation is throttled e.g due to resource - constraints. When resources become available the - operation will resume and this field will be - false again. - progress (Sequence[google.cloud.spanner_admin_database_v1.types.OperationProgress]): - The progress of the - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - operations. Currently, only index creation statements will - have a continuously updating progress. For non-index - creation statements, ``progress[i]`` will have start time - and end time populated with commit timestamp of operation, - as well as a progress of 100% once the operation has - completed. ``progress[i]`` is the operation progress for - ``statements[i]``. 
- """ - - database = proto.Field( - proto.STRING, - number=1, - ) - statements = proto.RepeatedField( - proto.STRING, - number=2, - ) - commit_timestamps = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - throttled = proto.Field( - proto.BOOL, - number=4, - ) - progress = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=common.OperationProgress, - ) - - -class DropDatabaseRequest(proto.Message): - r"""The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - - Attributes: - database (str): - Required. The database to be dropped. - """ - - database = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseDdlRequest(proto.Message): - r"""The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - Attributes: - database (str): - Required. The database whose schema we wish to get. Values - are of the form - ``projects//instances//databases/`` - """ - - database = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseDdlResponse(proto.Message): - r"""The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - Attributes: - statements (Sequence[str]): - A list of formatted DDL statements defining - the schema of the database specified in the - request. - """ - - statements = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class ListDatabaseOperationsRequest(proto.Message): - r"""The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Attributes: - parent (str): - Required. The instance of the database operations. Values - are of the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. 
The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [Operation][google.longrunning.Operation] are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. For example, - the type string for - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata] - is - ``type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata``. - - ``metadata.`` - any field in metadata.value. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic. However, you can specify AND, OR, - and NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND`` - ``(metadata.source_type:BACKUP) AND`` - ``(metadata.backup_info.backup:backup_howl) AND`` - ``(metadata.name:restored_howl) AND`` - ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Return operations where: - - - The operation's metadata type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - - The database is restored from a backup. - - The backup name contains "backup_howl". - - The restored database's name contains "restored_howl". - - The operation started before 2018-03-28T14:50:00Z. - - The operation resulted in an error. - page_size (int): - Number of operations to be returned in the - response. 
If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListDatabaseOperationsResponse.next_page_token] - from a previous - [ListDatabaseOperationsResponse][google.spanner.admin.database.v1.ListDatabaseOperationsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDatabaseOperationsResponse(proto.Message): - r"""The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Attributes: - operations (Sequence[google.longrunning.operations_pb2.Operation]): - The list of matching database [long-running - operations][google.longrunning.Operation]. Each operation's - name will be prefixed by the database's name. The - operation's - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations] - call to fetch more of the matching metadata. - """ - - @property - def raw_page(self): - return self - - operations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=operations_pb2.Operation, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class RestoreDatabaseRequest(proto.Message): - r"""The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. 
The name of the instance in which to create the - restored database. This instance must be in the same project - and have the same instance configuration as the instance - containing the source backup. Values are of the form - ``projects//instances/``. - database_id (str): - Required. The id of the database to create and restore to. - This database must not already exist. The ``database_id`` - appended to ``parent`` forms the full database name of the - form - ``projects//instances//databases/``. - backup (str): - Name of the backup from which to restore. Values are of the - form - ``projects//instances//backups/``. - This field is a member of `oneof`_ ``source``. - encryption_config (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig): - Optional. An encryption configuration describing the - encryption type and key resources in Cloud KMS used to - encrypt/decrypt the database to restore to. If this field is - not specified, the restored database will use the same - encryption configuration as the backup by default, namely - [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] - = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - database_id = proto.Field( - proto.STRING, - number=2, - ) - backup = proto.Field( - proto.STRING, - number=3, - oneof='source', - ) - encryption_config = proto.Field( - proto.MESSAGE, - number=4, - message='RestoreDatabaseEncryptionConfig', - ) - - -class RestoreDatabaseEncryptionConfig(proto.Message): - r"""Encryption configuration for the restored database. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig.EncryptionType): - Required. The encryption type of the restored - database. - kms_key_name (str): - Optional. The Cloud KMS key that will be used to - encrypt/decrypt the restored database. 
This field should be - set only when - [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] - is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - """ - class EncryptionType(proto.Enum): - r"""Encryption types for the database to be restored.""" - ENCRYPTION_TYPE_UNSPECIFIED = 0 - USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1 - GOOGLE_DEFAULT_ENCRYPTION = 2 - CUSTOMER_MANAGED_ENCRYPTION = 3 - - encryption_type = proto.Field( - proto.ENUM, - number=1, - enum=EncryptionType, - ) - kms_key_name = proto.Field( - proto.STRING, - number=2, - ) - - -class RestoreDatabaseMetadata(proto.Message): - r"""Metadata type for the long-running operation returned by - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Name of the database being created and - restored to. - source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): - The type of the restore source. - backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): - Information about the backup used to restore - the database. - This field is a member of `oneof`_ ``source_info``. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which cancellation of this operation was - received. - [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] - starts asynchronous cancellation on a long-running - operation. The server makes a best effort to cancel the - operation, but success is not guaranteed. 
Clients can use - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - or other methods to check whether the cancellation succeeded - or whether the operation completed despite cancellation. On - successful cancellation, the operation is not deleted; - instead, it becomes an operation with an - [Operation.error][google.longrunning.Operation.error] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - optimize_database_operation_name (str): - If exists, the name of the long-running operation that will - be used to track the post-restore optimization process to - optimize the performance of the restored database, and - remove the dependency on the restore source. The name is of - the form - ``projects//instances//databases//operations/`` - where the is the name of database being created and restored - to. The metadata type of the long-running operation is - [OptimizeRestoredDatabaseMetadata][google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata]. - This long-running operation will be automatically created by - the system after the RestoreDatabase long-running operation - completes successfully. This operation will not be created - if the restore was not successful. 
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - source_type = proto.Field( - proto.ENUM, - number=2, - enum='RestoreSourceType', - ) - backup_info = proto.Field( - proto.MESSAGE, - number=3, - oneof='source_info', - message=gsad_backup.BackupInfo, - ) - progress = proto.Field( - proto.MESSAGE, - number=4, - message=common.OperationProgress, - ) - cancel_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - optimize_database_operation_name = proto.Field( - proto.STRING, - number=6, - ) - - -class OptimizeRestoredDatabaseMetadata(proto.Message): - r"""Metadata type for the long-running operation used to track - the progress of optimizations performed on a newly restored - database. This long-running operation is automatically created - by the system after the successful completion of a database - restore, and cannot be cancelled. - - Attributes: - name (str): - Name of the restored database being - optimized. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the post-restore - optimizations. 
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - progress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/mypy.ini b/owl-bot-staging/spanner_admin_database/v1/mypy.ini deleted file mode 100644 index 4505b48543..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_database/v1/noxfile.py b/owl-bot-staging/spanner_admin_database/v1/noxfile.py deleted file mode 100644 index 0106b9f16a..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/spanner_admin_database_v1/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.9') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.9') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py deleted file mode 100644 index cc4c78d884..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py +++ /dev/null 
@@ -1,192 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class spanner_admin_databaseCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ), - 'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', ), - 'delete_backup': ('name', ), - 'drop_database': ('database', ), - 'get_backup': ('name', ), - 'get_database': ('name', ), - 'get_database_ddl': ('database', ), - 'get_iam_policy': ('resource', 'options', ), - 'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_backups': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_databases': ('parent', 'page_size', 'page_token', ), - 'restore_database': ('parent', 
'database_id', 'backup', 'encryption_config', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_backup': ('backup', 'update_mask', ), - 'update_database_ddl': ('database', 'statements', 'operation_id', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=spanner_admin_databaseCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the spanner_admin_database client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_database/v1/setup.py b/owl-bot-staging/spanner_admin_database/v1/setup.py deleted file mode 100644 index 3ad2096c24..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/setup.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-spanner-admin-database', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.19.4', - 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', - ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py deleted file mode 100644 index 50ad1d4b84..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ /dev/null @@ -1,5880 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminAsyncClient -from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminClient -from google.cloud.spanner_admin_database_v1.services.database_admin import pagers -from google.cloud.spanner_admin_database_v1.services.database_admin import transports -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import common -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore 
-from google.type import expr_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DatabaseAdminClient._get_default_mtls_endpoint(None) is None - assert DatabaseAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - DatabaseAdminClient, - DatabaseAdminAsyncClient, -]) -def test_database_admin_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'spanner.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ 
- (transports.DatabaseAdminGrpcTransport, "grpc"), - (transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_database_admin_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - DatabaseAdminClient, - DatabaseAdminAsyncClient, -]) -def test_database_admin_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'spanner.googleapis.com:443' - - -def test_database_admin_client_get_transport_class(): - transport = DatabaseAdminClient.get_transport_class() - available_transports = [ - transports.DatabaseAdminGrpcTransport, - ] - assert transport in available_transports - - transport = DatabaseAdminClient.get_transport_class("grpc") - assert transport == transports.DatabaseAdminGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatabaseAdminClient, 
transports.DatabaseAdminGrpcTransport, "grpc"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(DatabaseAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminClient)) -@mock.patch.object(DatabaseAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminAsyncClient)) -def test_database_admin_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "true"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "false"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(DatabaseAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminClient)) -@mock.patch.object(DatabaseAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_database_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_database_admin_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_database_admin_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_database_admin_client_client_options_from_dict(): - with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DatabaseAdminClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_databases(transport: str = 'grpc', request_type=spanner_database_admin.ListDatabasesRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabasesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabasesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_databases_from_dict(): - test_list_databases(request_type=dict) - - -def test_list_databases_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - client.list_databases() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabasesRequest() - - -@pytest.mark.asyncio -async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabasesRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabasesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_databases_async_from_dict(): - await test_list_databases_async(request_type=dict) - - -def test_list_databases_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.ListDatabasesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabasesResponse() - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_databases_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.ListDatabasesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse()) - await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_databases_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabasesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_databases( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_databases_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_databases( - spanner_database_admin.ListDatabasesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_databases_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabasesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_databases( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_databases_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_databases( - spanner_database_admin.ListDatabasesRequest(), - parent='parent_value', - ) - - -def test_list_databases_pager(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_databases(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, spanner_database_admin.Database) - for i in results) - -def test_list_databases_pages(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - ), - RuntimeError, - ) - pages = list(client.list_databases(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_databases_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_databases(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_database_admin.Database) - for i in responses) - -@pytest.mark.asyncio -async def test_list_databases_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_databases(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_create_database(transport: str = 'grpc', request_type=spanner_database_admin.CreateDatabaseRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.CreateDatabaseRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_create_database_from_dict(): - test_create_database(request_type=dict) - - -def test_create_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - client.create_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.CreateDatabaseRequest() - - -@pytest.mark.asyncio -async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.CreateDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.CreateDatabaseRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_database_async_from_dict(): - await test_create_database_async(request_type=dict) - - -def test_create_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.CreateDatabaseRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.CreateDatabaseRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_database( - parent='parent_value', - create_statement='create_statement_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].create_statement == 'create_statement_value' - - -def test_create_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_database( - spanner_database_admin.CreateDatabaseRequest(), - parent='parent_value', - create_statement='create_statement_value', - ) - - -@pytest.mark.asyncio -async def test_create_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_database( - parent='parent_value', - create_statement='create_statement_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].create_statement == 'create_statement_value' - - -@pytest.mark.asyncio -async def test_create_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_database( - spanner_database_admin.CreateDatabaseRequest(), - parent='parent_value', - create_statement='create_statement_value', - ) - - -def test_get_database(transport: str = 'grpc', request_type=spanner_database_admin.GetDatabaseRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = spanner_database_admin.Database( - name='name_value', - state=spanner_database_admin.Database.State.CREATING, - version_retention_period='version_retention_period_value', - default_leader='default_leader_value', - ) - response = client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.Database) - assert response.name == 'name_value' - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == 'version_retention_period_value' - assert response.default_leader == 'default_leader_value' - - -def test_get_database_from_dict(): - test_get_database(request_type=dict) - - -def test_get_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - client.get_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseRequest() - - -@pytest.mark.asyncio -async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database( - name='name_value', - state=spanner_database_admin.Database.State.CREATING, - version_retention_period='version_retention_period_value', - default_leader='default_leader_value', - )) - response = await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.Database) - assert response.name == 'name_value' - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == 'version_retention_period_value' - assert response.default_leader == 'default_leader_value' - - -@pytest.mark.asyncio -async def test_get_database_async_from_dict(): - await test_get_database_async(request_type=dict) - - -def test_get_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = spanner_database_admin.Database() - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.Database() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name='name_value', - ) - - -def test_update_database_ddl(transport: str = 'grpc', request_type=spanner_database_admin.UpdateDatabaseDdlRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_database_ddl_from_dict(): - test_update_database_ddl(request_type=dict) - - -def test_update_database_ddl_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - client.update_database_ddl() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest() - - -@pytest.mark.asyncio -async def test_update_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseDdlRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_database_ddl_async_from_dict(): - await test_update_database_ddl_async(request_type=dict) - - -def test_update_database_ddl_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseDdlRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_database_ddl_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseDdlRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -def test_update_database_ddl_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database_ddl( - database='database_value', - statements=['statements_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - assert args[0].statements == ['statements_value'] - - -def test_update_database_ddl_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database_ddl( - spanner_database_admin.UpdateDatabaseDdlRequest(), - database='database_value', - statements=['statements_value'], - ) - - -@pytest.mark.asyncio -async def test_update_database_ddl_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_database_ddl( - database='database_value', - statements=['statements_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - assert args[0].statements == ['statements_value'] - - -@pytest.mark.asyncio -async def test_update_database_ddl_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_database_ddl( - spanner_database_admin.UpdateDatabaseDdlRequest(), - database='database_value', - statements=['statements_value'], - ) - - -def test_drop_database(transport: str = 'grpc', request_type=spanner_database_admin.DropDatabaseRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.DropDatabaseRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_drop_database_from_dict(): - test_drop_database(request_type=dict) - - -def test_drop_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - client.drop_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.DropDatabaseRequest() - - -@pytest.mark.asyncio -async def test_drop_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.DropDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.DropDatabaseRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_drop_database_async_from_dict(): - await test_drop_database_async(request_type=dict) - - -def test_drop_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = spanner_database_admin.DropDatabaseRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value = None - client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_drop_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.DropDatabaseRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -def test_drop_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.drop_database( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - - -def test_drop_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.drop_database( - spanner_database_admin.DropDatabaseRequest(), - database='database_value', - ) - - -@pytest.mark.asyncio -async def test_drop_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.drop_database( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - - -@pytest.mark.asyncio -async def test_drop_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.drop_database( - spanner_database_admin.DropDatabaseRequest(), - database='database_value', - ) - - -def test_get_database_ddl(transport: str = 'grpc', request_type=spanner_database_admin.GetDatabaseDdlRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.GetDatabaseDdlResponse( - statements=['statements_value'], - ) - response = client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseDdlRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) - assert response.statements == ['statements_value'] - - -def test_get_database_ddl_from_dict(): - test_get_database_ddl(request_type=dict) - - -def test_get_database_ddl_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - client.get_database_ddl() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseDdlRequest() - - -@pytest.mark.asyncio -async def test_get_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseDdlRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse( - statements=['statements_value'], - )) - response = await client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseDdlRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) - assert response.statements == ['statements_value'] - - -@pytest.mark.asyncio -async def test_get_database_ddl_async_from_dict(): - await test_get_database_ddl_async(request_type=dict) - - -def test_get_database_ddl_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = spanner_database_admin.GetDatabaseDdlRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - call.return_value = spanner_database_admin.GetDatabaseDdlResponse() - client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_database_ddl_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseDdlRequest() - - request.database = 'database/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse()) - await client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database/value', - ) in kw['metadata'] - - -def test_get_database_ddl_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.GetDatabaseDdlResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database_ddl( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - - -def test_get_database_ddl_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_database_ddl( - spanner_database_admin.GetDatabaseDdlRequest(), - database='database_value', - ) - - -@pytest.mark.asyncio -async def test_get_database_ddl_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.GetDatabaseDdlResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_database_ddl( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].database == 'database_value' - - -@pytest.mark.asyncio -async def test_get_database_ddl_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_database_ddl( - spanner_database_admin.GetDatabaseDdlRequest(), - database='database_value', - ) - - -def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_from_dict(): - test_set_iam_policy(request_type=dict) - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_set_iam_policy_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_from_dict(): - test_get_iam_policy(request_type=dict) - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_get_iam_policy_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_from_dict(): - test_test_iam_permissions(request_type=dict) - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_test_iam_permissions_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -def test_test_iam_permissions_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_create_backup(transport: str = 'grpc', request_type=gsad_backup.CreateBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.CreateBackupRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_create_backup_from_dict(): - test_create_backup(request_type=dict) - - -def test_create_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - client.create_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.CreateBackupRequest() - - -@pytest.mark.asyncio -async def test_create_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.CreateBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.CreateBackupRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_backup_async_from_dict(): - await test_create_backup_async(request_type=dict) - - -def test_create_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.CreateBackupRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.CreateBackupRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_backup( - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].backup == gsad_backup.Backup(database='database_value') - assert args[0].backup_id == 'backup_id_value' - - -def test_create_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_backup( - gsad_backup.CreateBackupRequest(), - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - - -@pytest.mark.asyncio -async def test_create_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_backup( - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].backup == gsad_backup.Backup(database='database_value') - assert args[0].backup_id == 'backup_id_value' - - -@pytest.mark.asyncio -async def test_create_backup_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_backup( - gsad_backup.CreateBackupRequest(), - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - - -def test_get_backup(transport: str = 'grpc', request_type=backup.GetBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - state=backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - ) - response = client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == backup.GetBackupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - - -def test_get_backup_from_dict(): - test_get_backup(request_type=dict) - - -def test_get_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - client.get_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.GetBackupRequest() - - -@pytest.mark.asyncio -async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=backup.GetBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - state=backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - )) - response = await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == backup.GetBackupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - - -@pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) - - -def test_get_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.GetBackupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = backup.Backup() - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.GetBackupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup( - backup.GetBackupRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_backup( - backup.GetBackupRequest(), - name='name_value', - ) - - -def test_update_backup(transport: str = 'grpc', request_type=gsad_backup.UpdateBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - ) - response = client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.UpdateBackupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gsad_backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - - -def test_update_backup_from_dict(): - test_update_backup(request_type=dict) - - -def test_update_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - client.update_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.UpdateBackupRequest() - - -@pytest.mark.asyncio -async def test_update_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.UpdateBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - )) - response = await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.UpdateBackupRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gsad_backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - - -@pytest.mark.asyncio -async def test_update_backup_async_from_dict(): - await test_update_backup_async(request_type=dict) - - -def test_update_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.UpdateBackupRequest() - - request.backup.name = 'backup.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = gsad_backup.Backup() - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup.name=backup.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.UpdateBackupRequest() - - request.backup.name = 'backup.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) - await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup.name=backup.name/value', - ) in kw['metadata'] - - -def test_update_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_backup( - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].backup == gsad_backup.Backup(database='database_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_backup( - gsad_backup.UpdateBackupRequest(), - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup.Backup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_backup( - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].backup == gsad_backup.Backup(database='database_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_backup_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_backup( - gsad_backup.UpdateBackupRequest(), - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_backup(transport: str = 'grpc', request_type=backup.DeleteBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == backup.DeleteBackupRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_backup_from_dict(): - test_delete_backup(request_type=dict) - - -def test_delete_backup_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - client.delete_backup() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.DeleteBackupRequest() - - -@pytest.mark.asyncio -async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=backup.DeleteBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == backup.DeleteBackupRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) - - -def test_delete_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.DeleteBackupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = None - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.DeleteBackupRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - backup.DeleteBackupRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_backup( - backup.DeleteBackupRequest(), - name='name_value', - ) - - -def test_list_backups(transport: str = 'grpc', request_type=backup.ListBackupsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_backups_from_dict(): - test_list_backups(request_type=dict) - - -def test_list_backups_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - client.list_backups() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupsRequest() - - -@pytest.mark.asyncio -async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupsRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_backups_async_from_dict(): - await test_list_backups_async(request_type=dict) - - -def test_list_backups_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.ListBackupsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = backup.ListBackupsResponse() - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_backups_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.ListBackupsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse()) - await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_backups_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_backups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_backups_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backups( - backup.ListBackupsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_backups_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_backups_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_backups( - backup.ListBackupsRequest(), - parent='parent_value', - ) - - -def test_list_backups_pager(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backups(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, backup.Backup) - for i in results) - -def test_list_backups_pages(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backups_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backup.Backup) - for i in responses) - -@pytest.mark.asyncio -async def test_list_backups_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_backups(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_restore_database(transport: str = 'grpc', request_type=spanner_database_admin.RestoreDatabaseRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.RestoreDatabaseRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_restore_database_from_dict(): - test_restore_database(request_type=dict) - - -def test_restore_database_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - client.restore_database() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.RestoreDatabaseRequest() - - -@pytest.mark.asyncio -async def test_restore_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.RestoreDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.RestoreDatabaseRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restore_database_async_from_dict(): - await test_restore_database_async(request_type=dict) - - -def test_restore_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.RestoreDatabaseRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_restore_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.RestoreDatabaseRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_restore_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.restore_database( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].database_id == 'database_id_value' - assert args[0].backup == 'backup_value' - - -def test_restore_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.restore_database( - spanner_database_admin.RestoreDatabaseRequest(), - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - -@pytest.mark.asyncio -async def test_restore_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.restore_database( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].database_id == 'database_id_value' - assert args[0].backup == 'backup_value' - - -@pytest.mark.asyncio -async def test_restore_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.restore_database( - spanner_database_admin.RestoreDatabaseRequest(), - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - -def test_list_database_operations(transport: str = 'grpc', request_type=spanner_database_admin.ListDatabaseOperationsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabaseOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_database_operations_from_dict(): - test_list_database_operations(request_type=dict) - - -def test_list_database_operations_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - client.list_database_operations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest() - - -@pytest.mark.asyncio -async def test_list_database_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseOperationsRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabaseOperationsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_database_operations_async_from_dict(): - await test_list_database_operations_async(request_type=dict) - - -def test_list_database_operations_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.ListDatabaseOperationsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_database_operations_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.ListDatabaseOperationsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) - await client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_database_operations_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_database_operations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_database_operations_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_database_operations( - spanner_database_admin.ListDatabaseOperationsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_database_operations_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_database_operations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_database_operations_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_database_operations( - spanner_database_admin.ListDatabaseOperationsRequest(), - parent='parent_value', - ) - - -def test_list_database_operations_pager(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_database_operations(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) - -def test_list_database_operations_pages(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_database_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_database_operations_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_database_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - -@pytest.mark.asyncio -async def test_list_database_operations_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_database_operations(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_list_backup_operations(transport: str = 'grpc', request_type=backup.ListBackupOperationsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupOperationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupOperationsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_backup_operations_from_dict(): - test_list_backup_operations(request_type=dict) - - -def test_list_backup_operations_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - client.list_backup_operations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupOperationsRequest() - - -@pytest.mark.asyncio -async def test_list_backup_operations_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupOperationsRequest): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupOperationsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupOperationsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_backup_operations_async_from_dict(): - await test_list_backup_operations_async(request_type=dict) - - -def test_list_backup_operations_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.ListBackupOperationsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - call.return_value = backup.ListBackupOperationsResponse() - client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_backup_operations_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.ListBackupOperationsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse()) - await client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_backup_operations_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupOperationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_backup_operations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_backup_operations_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_operations( - backup.ListBackupOperationsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_backup_operations_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = backup.ListBackupOperationsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backup_operations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_backup_operations_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backup_operations( - backup.ListBackupOperationsRequest(), - parent='parent_value', - ) - - -def test_list_backup_operations_pager(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backup_operations(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) - -def test_list_backup_operations_pages(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backup_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backup_operations_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - -@pytest.mark.asyncio -async def test_list_backup_operations_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_backup_operations(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DatabaseAdminClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DatabaseAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DatabaseAdminGrpcTransport, - ) - -def test_database_admin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DatabaseAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_database_admin_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DatabaseAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_databases', - 'create_database', - 'get_database', - 'update_database_ddl', - 'drop_database', - 'get_database_ddl', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'create_backup', - 'get_backup', - 'update_backup', - 'delete_backup', - 'list_backups', - 'restore_database', - 'list_database_operations', - 'list_backup_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - -def test_database_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatabaseAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id="octopus", - ) - - -def test_database_admin_base_transport_with_adc(): - # Test the default 
credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatabaseAdminTransport() - adc.assert_called_once() - - -def test_database_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DatabaseAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, - ], -) -def test_database_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DatabaseAdminGrpcTransport, grpc_helpers), - (transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_database_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) -def test_database_admin_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_database_admin_host_no_port(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), - ) - assert client.transport._host == 'spanner.googleapis.com:443' - - -def test_database_admin_host_with_port(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), - ) - assert client.transport._host == 'spanner.googleapis.com:8000' - -def test_database_admin_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.DatabaseAdminGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_database_admin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DatabaseAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) -def test_database_admin_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - 
credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) -def test_database_admin_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_database_admin_grpc_lro_client(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_database_admin_grpc_lro_async_client(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_backup_path(): - project = "squid" - instance = "clam" - backup = "whelk" - expected = "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) - actual = DatabaseAdminClient.backup_path(project, instance, backup) - assert expected == actual - - -def test_parse_backup_path(): - expected = { - "project": "octopus", - "instance": "oyster", - "backup": "nudibranch", - } - path = DatabaseAdminClient.backup_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_backup_path(path) - assert expected == actual - -def test_crypto_key_path(): - project = "cuttlefish" - location = "mussel" - key_ring = "winkle" - crypto_key = "nautilus" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - actual = DatabaseAdminClient.crypto_key_path(project, location, key_ring, crypto_key) - assert expected == actual - - -def test_parse_crypto_key_path(): - expected = { - "project": "scallop", - "location": "abalone", - "key_ring": "squid", - "crypto_key": "clam", - } - path = DatabaseAdminClient.crypto_key_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_crypto_key_path(path) - assert expected == actual - -def test_crypto_key_version_path(): - project = "whelk" - location = "octopus" - key_ring = "oyster" - crypto_key = "nudibranch" - crypto_key_version = "cuttlefish" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) - actual = DatabaseAdminClient.crypto_key_version_path(project, location, key_ring, crypto_key, crypto_key_version) - assert expected == actual - - -def test_parse_crypto_key_version_path(): - expected = { - "project": "mussel", - "location": "winkle", - "key_ring": "nautilus", - "crypto_key": "scallop", - "crypto_key_version": "abalone", - } - path = DatabaseAdminClient.crypto_key_version_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_crypto_key_version_path(path) - assert expected == actual - -def test_database_path(): - project = "squid" - instance = "clam" - database = "whelk" - expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - actual = DatabaseAdminClient.database_path(project, instance, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "octopus", - "instance": "oyster", - "database": "nudibranch", - } - path = DatabaseAdminClient.database_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_database_path(path) - assert expected == actual - -def test_instance_path(): - project = "cuttlefish" - instance = "mussel" - expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - actual = DatabaseAdminClient.instance_path(project, instance) - assert expected == actual - - -def test_parse_instance_path(): - expected = { - "project": "winkle", - "instance": "nautilus", - } - path = DatabaseAdminClient.instance_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_instance_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DatabaseAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = DatabaseAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = DatabaseAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = DatabaseAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DatabaseAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = DatabaseAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = DatabaseAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = DatabaseAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DatabaseAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = DatabaseAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: - transport_class = DatabaseAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with 
mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() diff --git a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc deleted file mode 100644 index 31f0b429ca..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/spanner_admin_instance/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in deleted file mode 100644 index 5af7430e8d..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/spanner_admin_instance *.py -recursive-include google/cloud/spanner_admin_instance_v1 *.py diff --git a/owl-bot-staging/spanner_admin_instance/v1/README.rst b/owl-bot-staging/spanner_admin_instance/v1/README.rst deleted file mode 100644 index 94972f5b29..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Spanner Admin Instance API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Spanner Admin Instance API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py deleted file mode 100644 index 658c054d8e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-spanner-admin-instance documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. 
-needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-spanner-admin-instance" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. 
-language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-spanner-admin-instance-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. 
- # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-spanner-admin-instance.tex", - u"google-cloud-spanner-admin-instance Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-spanner-admin-instance", - u"Google Cloud Spanner Admin Instance Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-spanner-admin-instance", - u"google-cloud-spanner-admin-instance Documentation", - author, - "google-cloud-spanner-admin-instance", - "GAPIC library for Google Cloud Spanner Admin Instance API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst 
b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst deleted file mode 100644 index bc65940f95..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - spanner_admin_instance_v1/services - spanner_admin_instance_v1/types diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst deleted file mode 100644 index fe820b3fad..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst +++ /dev/null @@ -1,10 +0,0 @@ -InstanceAdmin -------------------------------- - -.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services.rst deleted file mode 100644 index 407d44cc34..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner Admin Instance v1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - instance_admin diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types.rst deleted file mode 100644 index 8f7204ebce..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Spanner Admin Instance v1 API -==================================================== - -.. 
automodule:: google.cloud.spanner_admin_instance_v1.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py deleted file mode 100644 index 84caf4479c..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.cloud.spanner_admin_instance_v1.services.instance_admin.client import InstanceAdminClient -from google.cloud.spanner_admin_instance_v1.services.instance_admin.async_client import InstanceAdminAsyncClient - -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import Instance -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstanceConfig -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaInfo -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceRequest - -__all__ = ('InstanceAdminClient', - 'InstanceAdminAsyncClient', - 'CreateInstanceMetadata', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'GetInstanceConfigRequest', - 'GetInstanceRequest', - 'Instance', - 'InstanceConfig', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'ListInstancesRequest', - 
'ListInstancesResponse', - 'ReplicaInfo', - 'UpdateInstanceMetadata', - 'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed deleted file mode 100644 index 915a8e55e3..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py deleted file mode 100644 index 971e008623..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from .services.instance_admin import InstanceAdminClient -from .services.instance_admin import InstanceAdminAsyncClient - -from .types.spanner_instance_admin import CreateInstanceMetadata -from .types.spanner_instance_admin import CreateInstanceRequest -from .types.spanner_instance_admin import DeleteInstanceRequest -from .types.spanner_instance_admin import GetInstanceConfigRequest -from .types.spanner_instance_admin import GetInstanceRequest -from .types.spanner_instance_admin import Instance -from .types.spanner_instance_admin import InstanceConfig -from .types.spanner_instance_admin import ListInstanceConfigsRequest -from .types.spanner_instance_admin import ListInstanceConfigsResponse -from .types.spanner_instance_admin import ListInstancesRequest -from .types.spanner_instance_admin import ListInstancesResponse -from .types.spanner_instance_admin import ReplicaInfo -from .types.spanner_instance_admin import UpdateInstanceMetadata -from .types.spanner_instance_admin import UpdateInstanceRequest - -__all__ = ( - 'InstanceAdminAsyncClient', -'CreateInstanceMetadata', -'CreateInstanceRequest', -'DeleteInstanceRequest', -'GetInstanceConfigRequest', -'GetInstanceRequest', -'Instance', -'InstanceAdminClient', -'InstanceConfig', -'ListInstanceConfigsRequest', -'ListInstanceConfigsResponse', -'ListInstancesRequest', -'ListInstancesResponse', -'ReplicaInfo', -'UpdateInstanceMetadata', -'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json deleted file mode 100644 index 6fee5bcd53..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json +++ /dev/null @@ -1,123 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": 
"google.cloud.spanner_admin_instance_v1", - "protoPackage": "google.spanner.admin.instance.v1", - "schema": "1.0", - "services": { - "InstanceAdmin": { - "clients": { - "grpc": { - "libraryClient": "InstanceAdminClient", - "rpcs": { - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetInstanceConfig": { - "methods": [ - "get_instance_config" - ] - }, - "ListInstanceConfigs": { - "methods": [ - "list_instance_configs" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - } - } - }, - "grpc-async": { - "libraryClient": "InstanceAdminAsyncClient", - "rpcs": { - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetInstanceConfig": { - "methods": [ - "get_instance_config" - ] - }, - "ListInstanceConfigs": { - "methods": [ - "list_instance_configs" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed deleted file mode 100644 index 915a8e55e3..0000000000 --- 
a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py deleted file mode 100644 index 4de65971c2..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py deleted file mode 100644 index 1e59529b7e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import InstanceAdminClient -from .async_client import InstanceAdminAsyncClient - -__all__ = ( - 'InstanceAdminClient', - 'InstanceAdminAsyncClient', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py deleted file mode 100644 index d252771f56..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ /dev/null @@ -1,1301 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -OptionalRetry = Union[retries.Retry, object] - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport -from .client import InstanceAdminClient - - -class InstanceAdminAsyncClient: - """Cloud Spanner Instance Admin API - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). 
- Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - """ - - _client: InstanceAdminClient - - DEFAULT_ENDPOINT = InstanceAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT - - instance_path = staticmethod(InstanceAdminClient.instance_path) - parse_instance_path = staticmethod(InstanceAdminClient.parse_instance_path) - instance_config_path = staticmethod(InstanceAdminClient.instance_config_path) - parse_instance_config_path = staticmethod(InstanceAdminClient.parse_instance_config_path) - common_billing_account_path = staticmethod(InstanceAdminClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(InstanceAdminClient.parse_common_billing_account_path) - common_folder_path = staticmethod(InstanceAdminClient.common_folder_path) - parse_common_folder_path = staticmethod(InstanceAdminClient.parse_common_folder_path) - common_organization_path = staticmethod(InstanceAdminClient.common_organization_path) - parse_common_organization_path = staticmethod(InstanceAdminClient.parse_common_organization_path) - common_project_path = staticmethod(InstanceAdminClient.common_project_path) - parse_common_project_path = staticmethod(InstanceAdminClient.parse_common_project_path) - common_location_path = staticmethod(InstanceAdminClient.common_location_path) - parse_common_location_path = staticmethod(InstanceAdminClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. 
- args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminAsyncClient: The constructed client. - """ - return InstanceAdminClient.from_service_account_info.__func__(InstanceAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminAsyncClient: The constructed client. - """ - return InstanceAdminClient.from_service_account_file.__func__(InstanceAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> InstanceAdminTransport: - """Returns the transport used by the client instance. - - Returns: - InstanceAdminTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(InstanceAdminClient).get_transport_class, type(InstanceAdminClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, InstanceAdminTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the instance admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.InstanceAdminTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = InstanceAdminClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def list_instance_configs(self, - request: Union[spanner_instance_admin.ListInstanceConfigsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstanceConfigsAsyncPager: - r"""Lists the supported instance configurations for a - given project. - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]): - The request object. The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - parent (:class:`str`): - Required. 
The name of the project for which a list of - supported instance configurations is requested. Values - are of the form ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager: - The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_instance_admin.ListInstanceConfigsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_instance_configs, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstanceConfigsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance_config(self, - request: Union[spanner_instance_admin.GetInstanceConfigRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner_instance_admin.InstanceConfig: - r"""Gets information about a particular instance - configuration. - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]): - The request object. The request for - [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. - name (:class:`str`): - Required. The name of the requested instance - configuration. Values are of the form - ``projects//instanceConfigs/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_instance_v1.types.InstanceConfig: - A possible configuration for a Cloud - Spanner instance. Configurations define - the geographic placement of nodes and - their replication. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_instance_admin.GetInstanceConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_instance_config, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_instances(self, - request: Union[spanner_instance_admin.ListInstancesRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesAsyncPager: - r"""Lists all instances in the given project. - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]): - The request object. The request for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - parent (:class:`str`): - Required. The name of the project for which a list of - instances is requested. Values are of the form - ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager: - The response for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_instance_admin.ListInstancesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_instances, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance(self, - request: Union[spanner_instance_admin.GetInstanceRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> spanner_instance_admin.Instance: - r"""Gets information about a particular instance. - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]): - The request object. The request for - [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. - name (:class:`str`): - Required. The name of the requested instance. Values are - of the form ``projects//instances/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_instance_v1.types.Instance: - An isolated set of Cloud Spanner - resources on which databases can be - hosted. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_instance_admin.GetInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_instance, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_instance(self, - request: Union[spanner_instance_admin.CreateInstanceRequest, dict] = None, - *, - parent: str = None, - instance_id: str = None, - instance: spanner_instance_admin.Instance = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an instance and begins preparing it to begin serving. - The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of preparing the new instance. The instance name is - assigned by the caller. If the named instance already exists, - ``CreateInstance`` returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance is readable via the API, with all requested - attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance immediately - unreadable via the API. - - The instance can be deleted. - - All other attempts to modify the instance are rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can be created in the instance. - - The instance's allocated resource levels are readable via the - API. - - The instance's state becomes ``READY``. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track creation of the instance. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Instance][google.spanner.admin.instance.v1.Instance], if - successful. 
- - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]): - The request object. The request for - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - parent (:class:`str`): - Required. The name of the project in which to create the - instance. Values are of the form ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_id (:class:`str`): - Required. The ID of the instance to create. Valid - identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` - and must be between 2 and 64 characters in length. - - This corresponds to the ``instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): - Required. The instance to create. The name may be - omitted, but if specified must be - ``/instances/``. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_instance_v1.types.Instance` - An isolated set of Cloud Spanner resources on which - databases can be hosted. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, instance_id, instance]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_instance_admin.CreateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_id is not None: - request.instance_id = instance_id - if instance is not None: - request.instance = instance - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_instance, - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.Instance, - metadata_type=spanner_instance_admin.CreateInstanceMetadata, - ) - - # Done; return the response. - return response - - async def update_instance(self, - request: Union[spanner_instance_admin.UpdateInstanceRequest, dict] = None, - *, - instance: spanner_instance_admin.Instance = None, - field_mask: field_mask_pb2.FieldMask = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an instance, and begins allocating or releasing - resources as requested. 
The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the instance. If the named instance - does not exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance's - allocation has been requested, billing is based on the - newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all - resource changes, after which point it terminates with a - ``CANCELLED`` status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track the instance modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]): - The request object. 
The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): - Required. The instance to update, which must always - include the instance name. Otherwise, only fields - mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] - need be included. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which fields in - [Instance][google.spanner.admin.instance.v1.Instance] - should be updated. The field mask must always be - specified; this prevents any future fields in - [Instance][google.spanner.admin.instance.v1.Instance] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_instance_v1.types.Instance` - An isolated set of Cloud Spanner resources on which - databases can be hosted. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([instance, field_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_instance_admin.UpdateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_instance, - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.Instance, - metadata_type=spanner_instance_admin.UpdateInstanceMetadata, - ) - - # Done; return the response. - return response - - async def delete_instance(self, - request: Union[spanner_instance_admin.DeleteInstanceRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. 
- - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]): - The request object. The request for - [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. - name (:class:`str`): - Required. The name of the instance to be deleted. Values - are of the form - ``projects//instances/`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = spanner_instance_admin.DeleteInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_instance, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def set_iam_policy(self, - request: Union[iam_policy_pb2.SetIamPolicyRequest, dict] = None, - *, - resource: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on an instance resource. Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for `SetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). 
- A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Union[iam_policy_pb2.GetIamPolicyRequest, dict] = None, - *, - resource: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. - - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for `GetIamPolicy` - method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. It is used to - specify access control policies for Cloud Platform - resources. - - A Policy is a collection of bindings. A binding binds - one or more members to a single role. Members can be - user accounts, service accounts, Google groups, and - domains (such as G Suite). A role is a named list of - permissions (defined by IAM or configured by users). - A binding can optionally specify a condition, which - is a logic expression that further constrains the - role binding based on attributes about the request - and/or target resource. - - **JSON Example** - - { - "bindings": [ - { - "role": - "roles/resourcemanager.organizationAdmin", - "members": [ "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - - }, { "role": - "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { "title": "expirable access", - "description": "Does not grant access after - Sep 2020", "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", } } - - ] - - } - - **YAML Example** - - bindings: - members: - user:\ mike@example.com - - group:\ admins@example.com - domain:google.com - - serviceAccount:\ my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - user:\ eve@example.com role: - roles/resourcemanager.organizationViewer - condition: title: expirable access description: - Does not grant access after Sep 2020 expression: - request.time < - timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the - [IAM developer's - guide](\ https://cloud.google.com/iam/docs). - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - instance resource. 
- - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for - `TestIamPermissions` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`Sequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=30.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-spanner-admin-instance", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "InstanceAdminAsyncClient", -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py deleted file mode 100644 index 9df413e835..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ /dev/null @@ -1,1464 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -OptionalRetry = Union[retries.Retry, object] - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import InstanceAdminGrpcTransport -from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport - - -class InstanceAdminClientMeta(type): - """Metaclass for the InstanceAdmin client. 
- - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] - _transport_registry["grpc"] = InstanceAdminGrpcTransport - _transport_registry["grpc_asyncio"] = InstanceAdminGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[InstanceAdminTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class InstanceAdminClient(metaclass=InstanceAdminClientMeta): - """Cloud Spanner Instance Admin API - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. 
For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "spanner.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. 
- - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> InstanceAdminTransport: - """Returns the transport used by the client instance. - - Returns: - InstanceAdminTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def instance_path(project: str,instance: str,) -> str: - """Returns a fully-qualified instance string.""" - return "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - - @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: - """Parses a instance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def instance_config_path(project: str,instance_config: str,) -> str: - """Returns a fully-qualified instance_config string.""" - return "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, ) - - @staticmethod - def parse_instance_config_path(path: str) -> Dict[str,str]: - """Parses a instance_config path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instanceConfigs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: 
- """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, InstanceAdminTransport, None] = None, - client_options: 
Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the instance admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, InstanceAdminTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, InstanceAdminTransport): - # transport is a InstanceAdminTransport instance. 
def list_instance_configs(self,
        request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> pagers.ListInstanceConfigsPager:
    r"""Lists the supported instance configurations for a given project.

    Args:
        request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]):
            The request object. The request for
            [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
        parent (str):
            Required. The name of the project for which a list of
            supported instance configurations is requested. Values
            are of the form ``projects/``.

            This corresponds to the ``parent`` field on the
            ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager:
            The response for
            [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].

            Iterating over this object will yield results and
            resolve additional pages automatically.

    Raises:
        ValueError: If ``request`` is provided together with the
            flattened ``parent`` argument.
    """
    # Create or coerce a protobuf request object.
    # Sanity check: a full `request` object and flattened field
    # arguments are mutually exclusive.
    has_flattened_params = any([parent])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Minor optimization to avoid making a copy if the user passes
    # in a spanner_instance_admin.ListInstanceConfigsRequest.
    # There's no risk of modifying the input as we've already verified
    # there are no flattened fields.
    if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest):
        request = spanner_instance_admin.ListInstanceConfigsRequest(request)
    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if parent is not None:
        request.parent = parent

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.list_instance_configs]

    # Certain fields should be provided within the metadata header;
    # add these here.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # This method is paged; wrap the response in a pager, which provides
    # an `__iter__` convenience method.
    response = pagers.ListInstanceConfigsPager(
        method=rpc,
        request=request,
        response=response,
        metadata=metadata,
    )

    # Done; return the response.
    return response
def get_instance_config(self,
        request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> spanner_instance_admin.InstanceConfig:
    r"""Gets information about a particular instance configuration.

    Args:
        request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]):
            The request object. The request for
            [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig].
        name (str):
            Required. The name of the requested instance
            configuration. Values are of the form
            ``projects//instanceConfigs/``.

            This corresponds to the ``name`` field on the ``request``
            instance; if ``request`` is provided, this should not be
            set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.spanner_admin_instance_v1.types.InstanceConfig:
            A possible configuration for a Cloud Spanner instance.
            Configurations define the geographic placement of nodes
            and their replication.

    Raises:
        ValueError: If ``request`` is provided together with the
            flattened ``name`` argument.
    """
    # Create or coerce a protobuf request object.
    # Sanity check: a full `request` object and flattened field
    # arguments are mutually exclusive.
    has_flattened_params = any([name])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Minor optimization to avoid making a copy if the user passes
    # in a spanner_instance_admin.GetInstanceConfigRequest.
    # There's no risk of modifying the input as we've already verified
    # there are no flattened fields.
    if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest):
        request = spanner_instance_admin.GetInstanceConfigRequest(request)
    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if name is not None:
        request.name = name

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.get_instance_config]

    # Certain fields should be provided within the metadata header;
    # add these here.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("name", request.name),
        )),
    )

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_instance_admin.GetInstanceConfigRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest): - request = spanner_instance_admin.GetInstanceConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_instances(self, - request: Union[spanner_instance_admin.ListInstancesRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesPager: - r"""Lists all instances in the given project. - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]): - The request object. The request for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - parent (str): - Required. The name of the project for which a list of - instances is requested. Values are of the form - ``projects/``. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager: - The response for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a spanner_instance_admin.ListInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, spanner_instance_admin.ListInstancesRequest): - request = spanner_instance_admin.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_instances] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
def get_instance(self,
        request: Optional[Union[spanner_instance_admin.GetInstanceRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> spanner_instance_admin.Instance:
    r"""Gets information about a particular instance.

    Args:
        request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]):
            The request object. The request for
            [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance].
        name (str):
            Required. The name of the requested instance. Values are
            of the form ``projects//instances/``.

            This corresponds to the ``name`` field on the ``request``
            instance; if ``request`` is provided, this should not be
            set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.spanner_admin_instance_v1.types.Instance:
            An isolated set of Cloud Spanner resources on which
            databases can be hosted.

    Raises:
        ValueError: If ``request`` is provided together with the
            flattened ``name`` argument.
    """
    # Create or coerce a protobuf request object.
    # Sanity check: a full `request` object and flattened field
    # arguments are mutually exclusive.
    has_flattened_params = any([name])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Minor optimization to avoid making a copy if the user passes
    # in a spanner_instance_admin.GetInstanceRequest.
    # There's no risk of modifying the input as we've already verified
    # there are no flattened fields.
    if not isinstance(request, spanner_instance_admin.GetInstanceRequest):
        request = spanner_instance_admin.GetInstanceRequest(request)
    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if name is not None:
        request.name = name

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.get_instance]

    # Certain fields should be provided within the metadata header;
    # add these here.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("name", request.name),
        )),
    )

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
def create_instance(self,
        request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        instance_id: Optional[str] = None,
        instance: Optional[spanner_instance_admin.Instance] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> operation.Operation:
    r"""Creates an instance and begins preparing it to begin serving.

    The returned [long-running
    operation][google.longrunning.Operation] can be used to track
    the progress of preparing the new instance. The instance name is
    assigned by the caller. If the named instance already exists,
    ``CreateInstance`` returns ``ALREADY_EXISTS``.

    Immediately upon completion of this request:

    -  The instance is readable via the API, with all requested
       attributes but no allocated resources. Its state is
       ``CREATING``.

    Until completion of the returned operation:

    -  Cancelling the operation renders the instance immediately
       unreadable via the API.
    -  The instance can be deleted.
    -  All other attempts to modify the instance are rejected.

    Upon completion of the returned operation:

    -  Billing for all successfully-allocated resources begins (some
       types may have lower than the requested levels).
    -  Databases can be created in the instance.
    -  The instance's allocated resource levels are readable via the
       API.
    -  The instance's state becomes ``READY``.

    The returned operation will have a name of the format
    ``/operations/`` and can be used to track creation of the
    instance. The [metadata][google.longrunning.Operation.metadata]
    field type is
    [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
    The [response][google.longrunning.Operation.response] field type
    is [Instance][google.spanner.admin.instance.v1.Instance], if
    successful.

    Args:
        request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]):
            The request object. The request for
            [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].
        parent (str):
            Required. The name of the project in which to create the
            instance. Values are of the form ``projects/``.

            This corresponds to the ``parent`` field on the
            ``request`` instance; if ``request`` is provided, this
            should not be set.
        instance_id (str):
            Required. The ID of the instance to create. Valid
            identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]``
            and must be between 2 and 64 characters in length.

            This corresponds to the ``instance_id`` field on the
            ``request`` instance; if ``request`` is provided, this
            should not be set.
        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
            Required. The instance to create. The name may be
            omitted, but if specified must be
            ``/instances/``.

            This corresponds to the ``instance`` field on the
            ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.api_core.operation.Operation:
            An object representing a long-running operation.

            The result type for the operation will be
            :class:`google.cloud.spanner_admin_instance_v1.types.Instance`
            An isolated set of Cloud Spanner resources on which
            databases can be hosted.

    Raises:
        ValueError: If ``request`` is provided together with any of
            the flattened arguments.
    """
    # Create or coerce a protobuf request object.
    # Sanity check: a full `request` object and flattened field
    # arguments are mutually exclusive.
    has_flattened_params = any([parent, instance_id, instance])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Minor optimization to avoid making a copy if the user passes
    # in a spanner_instance_admin.CreateInstanceRequest.
    # There's no risk of modifying the input as we've already verified
    # there are no flattened fields.
    if not isinstance(request, spanner_instance_admin.CreateInstanceRequest):
        request = spanner_instance_admin.CreateInstanceRequest(request)
    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if parent is not None:
        request.parent = parent
    if instance_id is not None:
        request.instance_id = instance_id
    if instance is not None:
        request.instance = instance

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.create_instance]

    # Certain fields should be provided within the metadata header;
    # add these here.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Wrap the response in an operation future so callers can poll for
    # completion and receive a typed result/metadata.
    response = operation.from_gapic(
        response,
        self._transport.operations_client,
        spanner_instance_admin.Instance,
        metadata_type=spanner_instance_admin.CreateInstanceMetadata,
    )

    # Done; return the response.
    return response
def update_instance(self,
        request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None,
        *,
        instance: Optional[spanner_instance_admin.Instance] = None,
        field_mask: Optional[field_mask_pb2.FieldMask] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> operation.Operation:
    r"""Updates an instance, and begins allocating or releasing
    resources as requested.

    The returned [long-running
    operation][google.longrunning.Operation] can be used to track
    the progress of updating the instance. If the named instance
    does not exist, returns ``NOT_FOUND``.

    Immediately upon completion of this request:

    -  For resource types for which a decrease in the instance's
       allocation has been requested, billing is based on the
       newly-requested level.

    Until completion of the returned operation:

    -  Cancelling the operation sets its metadata's
       [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
       and begins restoring resources to their pre-request values.
       The operation is guaranteed to succeed at undoing all
       resource changes, after which point it terminates with a
       ``CANCELLED`` status.
    -  All other attempts to modify the instance are rejected.
    -  Reading the instance via the API continues to give the
       pre-request resource levels.

    Upon completion of the returned operation:

    -  Billing begins for all successfully-allocated resources (some
       types may have lower than the requested levels).
    -  All newly-reserved resources are available for serving the
       instance's tables.
    -  The instance's new resource levels are readable via the API.

    The returned operation will have a name of the format
    ``/operations/`` and can be used to track the instance
    modification. The
    [metadata][google.longrunning.Operation.metadata] field type is
    [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
    The [response][google.longrunning.Operation.response] field type
    is [Instance][google.spanner.admin.instance.v1.Instance], if
    successful.

    Authorization requires ``spanner.instances.update`` permission
    on resource
    [name][google.spanner.admin.instance.v1.Instance.name].

    Args:
        request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]):
            The request object. The request for
            [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance].
        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
            Required. The instance to update, which must always
            include the instance name. Otherwise, only fields
            mentioned in
            [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask]
            need be included.

            This corresponds to the ``instance`` field on the
            ``request`` instance; if ``request`` is provided, this
            should not be set.
        field_mask (google.protobuf.field_mask_pb2.FieldMask):
            Required. A mask specifying which fields in
            [Instance][google.spanner.admin.instance.v1.Instance]
            should be updated. The field mask must always be
            specified; this prevents any future fields in
            [Instance][google.spanner.admin.instance.v1.Instance]
            from being erased accidentally by clients that do not
            know about them.

            This corresponds to the ``field_mask`` field on the
            ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.api_core.operation.Operation:
            An object representing a long-running operation.

            The result type for the operation will be
            :class:`google.cloud.spanner_admin_instance_v1.types.Instance`
            An isolated set of Cloud Spanner resources on which
            databases can be hosted.

    Raises:
        ValueError: If ``request`` is provided together with any of
            the flattened arguments.
    """
    # Create or coerce a protobuf request object.
    # Sanity check: a full `request` object and flattened field
    # arguments are mutually exclusive.
    has_flattened_params = any([instance, field_mask])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Minor optimization to avoid making a copy if the user passes
    # in a spanner_instance_admin.UpdateInstanceRequest.
    # There's no risk of modifying the input as we've already verified
    # there are no flattened fields.
    if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest):
        request = spanner_instance_admin.UpdateInstanceRequest(request)
    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if instance is not None:
        request.instance = instance
    if field_mask is not None:
        request.field_mask = field_mask

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.update_instance]

    # Certain fields should be provided within the metadata header;
    # add these here. Routing is keyed on the *instance's* name.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("instance.name", request.instance.name),
        )),
    )

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Wrap the response in an operation future so callers can poll for
    # completion and receive a typed result/metadata.
    response = operation.from_gapic(
        response,
        self._transport.operations_client,
        spanner_instance_admin.Instance,
        metadata_type=spanner_instance_admin.UpdateInstanceMetadata,
    )

    # Done; return the response.
    return response
def delete_instance(self,
        request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> None:
    r"""Deletes an instance.

    Immediately upon completion of the request:

    -  Billing ceases for all of the instance's reserved resources.

    Soon afterward:

    -  The instance and *all of its databases* immediately and
       irrevocably disappear from the API. All data in the databases
       is permanently deleted.

    Args:
        request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]):
            The request object. The request for
            [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance].
        name (str):
            Required. The name of the instance to be deleted. Values
            are of the form
            ``projects//instances/``

            This corresponds to the ``name`` field on the ``request``
            instance; if ``request`` is provided, this should not be
            set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Raises:
        ValueError: If ``request`` is provided together with the
            flattened ``name`` argument.
    """
    # Create or coerce a protobuf request object.
    # Sanity check: a full `request` object and flattened field
    # arguments are mutually exclusive.
    has_flattened_params = any([name])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # Minor optimization to avoid making a copy if the user passes
    # in a spanner_instance_admin.DeleteInstanceRequest.
    # There's no risk of modifying the input as we've already verified
    # there are no flattened fields.
    if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest):
        request = spanner_instance_admin.DeleteInstanceRequest(request)
    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if name is not None:
        request.name = name

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.delete_instance]

    # Certain fields should be provided within the metadata header;
    # add these here.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("name", request.name),
        )),
    )

    # Send the request. The RPC returns Empty, so there is nothing to
    # hand back to the caller.
    rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def set_iam_policy(self,
        request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
        *,
        resource: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> policy_pb2.Policy:
    r"""Sets the access control policy on an instance resource. Replaces
    any existing policy.

    Authorization requires ``spanner.instances.setIamPolicy`` on
    [resource][google.iam.v1.SetIamPolicyRequest.resource].

    Args:
        request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]):
            The request object. Request message for `SetIamPolicy`
            method.
        resource (str):
            REQUIRED: The resource for which the policy is being
            specified. See the operation documentation for the
            appropriate value for this field.

            This corresponds to the ``resource`` field on the
            ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.iam.v1.policy_pb2.Policy:
            Defines an Identity and Access Management (IAM) policy: a
            collection of bindings, each binding one or more members
            to a single role, optionally constrained by a condition.
            For a description of IAM and its features, see the
            `IAM developer's guide <https://cloud.google.com/iam/docs>`__.

    Raises:
        ValueError: If ``request`` is provided together with the
            flattened ``resource`` argument.
    """
    # Create or coerce a protobuf request object.
    # Sanity check: a full `request` object and flattened field
    # arguments are mutually exclusive.
    has_flattened_params = any([resource])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    if isinstance(request, dict):
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        request = iam_policy_pb2.SetIamPolicyRequest(**request)
    elif not request:
        # Null request, just make one.
        request = iam_policy_pb2.SetIamPolicyRequest()
    if resource is not None:
        request.resource = resource

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]

    # Certain fields should be provided within the metadata header;
    # add these here.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("resource", request.resource),
        )),
    )

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
def get_iam_policy(self,
        request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
        *,
        resource: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, str]] = (),
        ) -> policy_pb2.Policy:
    r"""Gets the access control policy for an instance resource. Returns
    an empty policy if an instance exists but does not have a policy
    set.

    Authorization requires ``spanner.instances.getIamPolicy`` on
    [resource][google.iam.v1.GetIamPolicyRequest.resource].

    Args:
        request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]):
            The request object. Request message for `GetIamPolicy`
            method.
        resource (str):
            REQUIRED: The resource for which the policy is being
            requested. See the operation documentation for the
            appropriate value for this field.

            This corresponds to the ``resource`` field on the
            ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.iam.v1.policy_pb2.Policy:
            Defines an Identity and Access Management (IAM) policy: a
            collection of bindings, each binding one or more members
            to a single role, optionally constrained by a condition.
            For a description of IAM and its features, see the
            `IAM developer's guide <https://cloud.google.com/iam/docs>`__.

    Raises:
        ValueError: If ``request`` is provided together with the
            flattened ``resource`` argument.
    """
    # Create or coerce a protobuf request object.
    # Sanity check: a full `request` object and flattened field
    # arguments are mutually exclusive.
    has_flattened_params = any([resource])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    if isinstance(request, dict):
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        request = iam_policy_pb2.GetIamPolicyRequest(**request)
    elif not request:
        # Null request, just make one.
        request = iam_policy_pb2.GetIamPolicyRequest()
    if resource is not None:
        request.resource = resource

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]

    # Certain fields should be provided within the metadata header;
    # add these here.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("resource", request.resource),
        )),
    )

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
- has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Union[iam_policy_pb2.TestIamPermissionsRequest, dict] = None, - *, - resource: str = None, - permissions: Sequence[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for - `TestIamPermissions` method. 
- resource (str): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (Sequence[str]): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - if resource is not None: - request.resource = resource - if permissions: - request.permissions.extend(permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-spanner-admin-instance", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "InstanceAdminClient", -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py deleted file mode 100644 index add08e57e4..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py +++ /dev/null @@ -1,262 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin - - -class ListInstanceConfigsPager: - """A pager for iterating through ``list_instance_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instance_configs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstanceConfigs`` requests and continue to iterate - through the ``instance_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstanceConfigsResponse], - request: spanner_instance_admin.ListInstanceConfigsRequest, - response: spanner_instance_admin.ListInstanceConfigsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): - The initial request object. 
- response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_instance_admin.InstanceConfig]: - for page in self.pages: - yield from page.instance_configs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstanceConfigsAsyncPager: - """A pager for iterating through ``list_instance_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instance_configs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstanceConfigs`` requests and continue to iterate - through the ``instance_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]], - request: spanner_instance_admin.ListInstanceConfigsRequest, - response: spanner_instance_admin.ListInstanceConfigsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstanceConfig]: - async def async_generator(): - async for page in self.pages: - for response in page.instance_configs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancesPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instances`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstancesResponse], - request: spanner_instance_admin.ListInstancesRequest, - response: spanner_instance_admin.ListInstancesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_instance_admin.Instance]: - for page in self.pages: - yield from page.instances - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancesAsyncPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstancesResponse]], - request: spanner_instance_admin.ListInstancesRequest, - response: spanner_instance_admin.ListInstancesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = spanner_instance_admin.ListInstancesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[spanner_instance_admin.Instance]: - async def async_generator(): - async for page in self.pages: - for response in page.instances: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py deleted file mode 100644 index 6f2453770b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import InstanceAdminTransport -from .grpc import InstanceAdminGrpcTransport -from .grpc_asyncio import InstanceAdminGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] -_transport_registry['grpc'] = InstanceAdminGrpcTransport -_transport_registry['grpc_asyncio'] = InstanceAdminGrpcAsyncIOTransport - -__all__ = ( - 'InstanceAdminTransport', - 'InstanceAdminGrpcTransport', - 'InstanceAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py deleted file mode 100644 index 8d272f2bea..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ /dev/null @@ -1,325 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-spanner-admin-instance', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -class InstanceAdminTransport(abc.ABC): - """Abstract transport class for InstanceAdmin.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_instance_configs: gapic_v1.method.wrap_method( - self.list_instance_configs, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_instance_config: gapic_v1.method.wrap_method( - self.get_instance_config, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_instance: gapic_v1.method.wrap_method( - self.create_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.update_instance: gapic_v1.method.wrap_method( - 
self.update_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_instance: gapic_v1.method.wrap_method( - self.delete_instance, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( -initial=1.0,maximum=32.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - Union[ - spanner_instance_admin.ListInstanceConfigsResponse, - Awaitable[spanner_instance_admin.ListInstanceConfigsResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - Union[ - spanner_instance_admin.InstanceConfig, - Awaitable[spanner_instance_admin.InstanceConfig] - ]]: - raise NotImplementedError() - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - Union[ - spanner_instance_admin.ListInstancesResponse, - Awaitable[spanner_instance_admin.ListInstancesResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - Union[ - spanner_instance_admin.Instance, - Awaitable[spanner_instance_admin.Instance] - ]]: - raise NotImplementedError() - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise 
NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'InstanceAdminTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py deleted file mode 100644 index cfe155bdd7..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ /dev/null @@ -1,638 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO - - -class InstanceAdminGrpcTransport(InstanceAdminTransport): - """gRPC backend transport for InstanceAdmin. - - Cloud Spanner Instance Admin API - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. 
For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. 
- api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Sanity check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - spanner_instance_admin.ListInstanceConfigsResponse]: - r"""Return a callable for the list instance configs method over gRPC. - - Lists the supported instance configurations for a - given project. - - Returns: - Callable[[~.ListInstanceConfigsRequest], - ~.ListInstanceConfigsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_configs' not in self._stubs: - self._stubs['list_instance_configs'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', - request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, - ) - return self._stubs['list_instance_configs'] - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - spanner_instance_admin.InstanceConfig]: - r"""Return a callable for the get instance config method over gRPC. - - Gets information about a particular instance - configuration. 
- - Returns: - Callable[[~.GetInstanceConfigRequest], - ~.InstanceConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_config' not in self._stubs: - self._stubs['get_instance_config'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', - request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, - response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, - ) - return self._stubs['get_instance_config'] - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - spanner_instance_admin.ListInstancesResponse]: - r"""Return a callable for the list instances method over gRPC. - - Lists all instances in the given project. - - Returns: - Callable[[~.ListInstancesRequest], - ~.ListInstancesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', - request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, - ) - return self._stubs['list_instances'] - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - spanner_instance_admin.Instance]: - r"""Return a callable for the get instance method over gRPC. - - Gets information about a particular instance. 
- - Returns: - Callable[[~.GetInstanceRequest], - ~.Instance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance', - request_serializer=spanner_instance_admin.GetInstanceRequest.serialize, - response_deserializer=spanner_instance_admin.Instance.deserialize, - ) - return self._stubs['get_instance'] - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the create instance method over gRPC. - - Creates an instance and begins preparing it to begin serving. - The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of preparing the new instance. The instance name is - assigned by the caller. If the named instance already exists, - ``CreateInstance`` returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance is readable via the API, with all requested - attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance immediately - unreadable via the API. - - The instance can be deleted. - - All other attempts to modify the instance are rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can be created in the instance. - - The instance's allocated resource levels are readable via the - API. - - The instance's state becomes ``READY``. 
- - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track creation of the instance. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Returns: - Callable[[~.CreateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance', - request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance'] - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the update instance method over gRPC. - - Updates an instance, and begins allocating or releasing - resources as requested. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the instance. If the named instance - does not exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance's - allocation has been requested, billing is based on the - newly-requested level. 
- - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all - resource changes, after which point it terminates with a - ``CANCELLED`` status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track the instance modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - Returns: - Callable[[~.UpdateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance', - request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance'] - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. - - Returns: - Callable[[~.DeleteInstanceRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance', - request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on an instance resource. Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. 
- - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. - - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - self.grpc_channel.close() - -__all__ = ( - 'InstanceAdminGrpcTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py deleted file mode 100644 index 8787614293..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,642 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import InstanceAdminGrpcTransport - - -class InstanceAdminGrpcAsyncIOTransport(InstanceAdminTransport): - """gRPC AsyncIO backend transport for InstanceAdmin. - - Cloud Spanner Instance Admin API - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. 
For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Sanity check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]]: - r"""Return a callable for the list instance configs method over gRPC. - - Lists the supported instance configurations for a - given project. - - Returns: - Callable[[~.ListInstanceConfigsRequest], - Awaitable[~.ListInstanceConfigsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_instance_configs' not in self._stubs: - self._stubs['list_instance_configs'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', - request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, - ) - return self._stubs['list_instance_configs'] - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - Awaitable[spanner_instance_admin.InstanceConfig]]: - r"""Return a callable for the get instance config method over gRPC. - - Gets information about a particular instance - configuration. - - Returns: - Callable[[~.GetInstanceConfigRequest], - Awaitable[~.InstanceConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_config' not in self._stubs: - self._stubs['get_instance_config'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', - request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, - response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, - ) - return self._stubs['get_instance_config'] - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - Awaitable[spanner_instance_admin.ListInstancesResponse]]: - r"""Return a callable for the list instances method over gRPC. - - Lists all instances in the given project. - - Returns: - Callable[[~.ListInstancesRequest], - Awaitable[~.ListInstancesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', - request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, - ) - return self._stubs['list_instances'] - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - Awaitable[spanner_instance_admin.Instance]]: - r"""Return a callable for the get instance method over gRPC. - - Gets information about a particular instance. - - Returns: - Callable[[~.GetInstanceRequest], - Awaitable[~.Instance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance', - request_serializer=spanner_instance_admin.GetInstanceRequest.serialize, - response_deserializer=spanner_instance_admin.Instance.deserialize, - ) - return self._stubs['get_instance'] - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create instance method over gRPC. - - Creates an instance and begins preparing it to begin serving. - The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of preparing the new instance. The instance name is - assigned by the caller. If the named instance already exists, - ``CreateInstance`` returns ``ALREADY_EXISTS``. 
- - Immediately upon completion of this request: - - - The instance is readable via the API, with all requested - attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance immediately - unreadable via the API. - - The instance can be deleted. - - All other attempts to modify the instance are rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can be created in the instance. - - The instance's allocated resource levels are readable via the - API. - - The instance's state becomes ``READY``. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track creation of the instance. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Returns: - Callable[[~.CreateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance', - request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance'] - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update instance method over gRPC. - - Updates an instance, and begins allocating or releasing - resources as requested. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the instance. If the named instance - does not exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance's - allocation has been requested, billing is based on the - newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all - resource changes, after which point it terminates with a - ``CANCELLED`` status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. 
- - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track the instance modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - Returns: - Callable[[~.UpdateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance', - request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance'] - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. - - Returns: - Callable[[~.DeleteInstanceRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance', - request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on an instance resource. Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. 
- - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'InstanceAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py deleted file mode 100644 index 1577fa9bb7..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .spanner_instance_admin import ( - CreateInstanceMetadata, - CreateInstanceRequest, - DeleteInstanceRequest, - GetInstanceConfigRequest, - GetInstanceRequest, - Instance, - InstanceConfig, - ListInstanceConfigsRequest, - ListInstanceConfigsResponse, - ListInstancesRequest, - ListInstancesResponse, - ReplicaInfo, - UpdateInstanceMetadata, - UpdateInstanceRequest, -) - -__all__ = ( - 'CreateInstanceMetadata', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'GetInstanceConfigRequest', - 'GetInstanceRequest', - 'Instance', - 'InstanceConfig', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'ReplicaInfo', - 'UpdateInstanceMetadata', - 'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py deleted file mode 100644 index eb7e698e97..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py +++ /dev/null @@ -1,605 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.instance.v1', - manifest={ - 'ReplicaInfo', - 'InstanceConfig', - 'Instance', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'GetInstanceConfigRequest', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'CreateInstanceMetadata', - 'UpdateInstanceMetadata', - }, -) - - -class ReplicaInfo(proto.Message): - r""" - - Attributes: - location (str): - The location of the serving resources, e.g. - "us-central1". - type_ (google.cloud.spanner_admin_instance_v1.types.ReplicaInfo.ReplicaType): - The type of replica. - default_leader_location (bool): - If true, this location is designated as the default leader - location where leader replicas are placed. See the `region - types - documentation `__ - for more details. - """ - class ReplicaType(proto.Enum): - r"""Indicates the type of replica. See the `replica types - documentation `__ - for more details. - """ - TYPE_UNSPECIFIED = 0 - READ_WRITE = 1 - READ_ONLY = 2 - WITNESS = 3 - - location = proto.Field( - proto.STRING, - number=1, - ) - type_ = proto.Field( - proto.ENUM, - number=2, - enum=ReplicaType, - ) - default_leader_location = proto.Field( - proto.BOOL, - number=3, - ) - - -class InstanceConfig(proto.Message): - r"""A possible configuration for a Cloud Spanner instance. - Configurations define the geographic placement of nodes and - their replication. - - Attributes: - name (str): - A unique identifier for the instance configuration. Values - are of the form - ``projects//instanceConfigs/[a-z][-a-z0-9]*`` - display_name (str): - The name of this instance configuration as it - appears in UIs. 
- replicas (Sequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]): - The geographic placement of nodes in this - instance configuration and their replication - properties. - leader_options (Sequence[str]): - Allowed values of the “default_leader” schema option for - databases in instances that use this instance configuration. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - replicas = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ReplicaInfo', - ) - leader_options = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class Instance(proto.Message): - r"""An isolated set of Cloud Spanner resources on which databases - can be hosted. - - Attributes: - name (str): - Required. A unique identifier for the instance, which cannot - be changed after the instance is created. Values are of the - form - ``projects//instances/[a-z][-a-z0-9]*[a-z0-9]``. - The final segment of the name must be between 2 and 64 - characters in length. - config (str): - Required. The name of the instance's configuration. Values - are of the form - ``projects//instanceConfigs/``. See - also - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - and - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - display_name (str): - Required. The descriptive name for this - instance as it appears in UIs. Must be unique - per project and between 4 and 30 characters in - length. - node_count (int): - Required. The number of nodes allocated to this instance. - This may be zero in API responses for instances that are not - yet in state ``READY``. - - See `the - documentation `__ - for more information about nodes. - processing_units (int): - The number of processing units allocated to this instance. - At most one of processing_units or node_count should be - present in the message. 
This may be zero in API responses - for instances that are not yet in state ``READY``. - state (google.cloud.spanner_admin_instance_v1.types.Instance.State): - Output only. The current instance state. For - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance], - the state must be either omitted or set to ``CREATING``. For - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance], - the state must be either omitted or set to ``READY``. - labels (Sequence[google.cloud.spanner_admin_instance_v1.types.Instance.LabelsEntry]): - Cloud Labels are a flexible and lightweight mechanism for - organizing cloud resources into groups that reflect a - customer's organizational needs and deployment strategies. - Cloud Labels can be used to filter collections of resources. - They can be used to control how resource metrics are - aggregated. And they can be used as arguments to policy - management rules (e.g. route, firewall, load balancing, - etc.). - - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 64 labels can be associated with a given - resource. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - - If you plan to use labels in your own code, please note that - additional characters may be allowed in the future. And so - you are advised to use an internal label representation, - such as JSON, which doesn't rely upon specific characters - being disallowed. For example, representing labels as the - string: name + "*" + value would prove problematic if we - were to allow "*" in a future release. - endpoint_uris (Sequence[str]): - Deprecated. This field is not populated. 
- """ - class State(proto.Enum): - r"""Indicates the current state of the instance.""" - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - config = proto.Field( - proto.STRING, - number=2, - ) - display_name = proto.Field( - proto.STRING, - number=3, - ) - node_count = proto.Field( - proto.INT32, - number=5, - ) - processing_units = proto.Field( - proto.INT32, - number=9, - ) - state = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - endpoint_uris = proto.RepeatedField( - proto.STRING, - number=8, - ) - - -class ListInstanceConfigsRequest(proto.Message): - r"""The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Attributes: - parent (str): - Required. The name of the project for which a list of - supported instance configurations is requested. Values are - of the form ``projects/``. - page_size (int): - Number of instance configurations to be - returned in the response. If 0 or less, defaults - to the server's maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigsResponse.next_page_token] - from a previous - [ListInstanceConfigsResponse][google.spanner.admin.instance.v1.ListInstanceConfigsResponse]. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListInstanceConfigsResponse(proto.Message): - r"""The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Attributes: - instance_configs (Sequence[google.cloud.spanner_admin_instance_v1.types.InstanceConfig]): - The list of requested instance - configurations. 
- next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs] - call to fetch more of the matching instance configurations. - """ - - @property - def raw_page(self): - return self - - instance_configs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InstanceConfig', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class GetInstanceConfigRequest(proto.Message): - r"""The request for - [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. - - Attributes: - name (str): - Required. The name of the requested instance configuration. - Values are of the form - ``projects//instanceConfigs/``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class GetInstanceRequest(proto.Message): - r"""The request for - [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. - - Attributes: - name (str): - Required. The name of the requested instance. Values are of - the form ``projects//instances/``. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - If field_mask is present, specifies the subset of - [Instance][google.spanner.admin.instance.v1.Instance] fields - that should be returned. If absent, all - [Instance][google.spanner.admin.instance.v1.Instance] fields - are returned. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - field_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class CreateInstanceRequest(proto.Message): - r"""The request for - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - - Attributes: - parent (str): - Required. The name of the project in which to create the - instance. Values are of the form ``projects/``. - instance_id (str): - Required. The ID of the instance to create. 
Valid - identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and - must be between 2 and 64 characters in length. - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to create. The name may be omitted, - but if specified must be - ``/instances/``. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - instance_id = proto.Field( - proto.STRING, - number=2, - ) - instance = proto.Field( - proto.MESSAGE, - number=3, - message='Instance', - ) - - -class ListInstancesRequest(proto.Message): - r"""The request for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Attributes: - parent (str): - Required. The name of the project for which a list of - instances is requested. Values are of the form - ``projects/``. - page_size (int): - Number of instances to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstancesResponse.next_page_token] - from a previous - [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse]. - filter (str): - An expression for filtering the results of the request. - Filter rules are case insensitive. The fields eligible for - filtering are: - - - ``name`` - - ``display_name`` - - ``labels.key`` where key is the name of a label - - Some examples of using filters are: - - - ``name:*`` --> The instance has a name. - - ``name:Howl`` --> The instance's name contains the string - "howl". - - ``name:HOWL`` --> Equivalent to above. - - ``NAME:howl`` --> Equivalent to above. - - ``labels.env:*`` --> The instance has the label "env". - - ``labels.env:dev`` --> The instance has the label "env" - and the value of the label contains the string "dev". 
- - ``name:howl labels.env:dev`` --> The instance's name - contains "howl" and it has the label "env" with its value - containing "dev". - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - filter = proto.Field( - proto.STRING, - number=4, - ) - - -class ListInstancesResponse(proto.Message): - r"""The response for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Attributes: - instances (Sequence[google.cloud.spanner_admin_instance_v1.types.Instance]): - The list of requested instances. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances] - call to fetch more of the matching instances. - """ - - @property - def raw_page(self): - return self - - instances = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Instance', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateInstanceRequest(proto.Message): - r"""The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to update, which must always include - the instance name. Otherwise, only fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] - need be included. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [Instance][google.spanner.admin.instance.v1.Instance] should - be updated. The field mask must always be specified; this - prevents any future fields in - [Instance][google.spanner.admin.instance.v1.Instance] from - being erased accidentally by clients that do not know about - them. 
- """ - - instance = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - field_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteInstanceRequest(proto.Message): - r"""The request for - [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. - - Attributes: - name (str): - Required. The name of the instance to be deleted. Values are - of the form ``projects//instances/`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateInstanceMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - The instance being created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation failed or - was completed successfully. - """ - - instance = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - start_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - cancel_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateInstanceMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. 
- - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - The desired end state of the update. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation failed or - was completed successfully. - """ - - instance = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - start_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - cancel_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_instance/v1/mypy.ini b/owl-bot-staging/spanner_admin_instance/v1/mypy.ini deleted file mode 100644 index 4505b48543..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/noxfile.py b/owl-bot-staging/spanner_admin_instance/v1/noxfile.py deleted file mode 100644 index 0d39830270..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/spanner_admin_instance_v1/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.9') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.9') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py b/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py deleted file mode 100644 index afbc7517bc..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py +++ /dev/null 
@@ -1,185 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class spanner_admin_instanceCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_instance': ('name', 'field_mask', ), - 'get_instance_config': ('name', ), - 'list_instance_configs': ('parent', 'page_size', 'page_token', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_instance': ('instance', 'field_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, 
KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=spanner_admin_instanceCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. 
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the spanner_admin_instance client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_instance/v1/setup.py 
b/owl-bot-staging/spanner_admin_instance/v1/setup.py deleted file mode 100644 index 7c84e27a5b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/setup.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-spanner-admin-instance', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.19.4', - 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', - ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff 
--git a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py deleted file mode 100644 index b54a5fcc42..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py deleted file mode 100644 index 6af4075fb9..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ /dev/null @@ -1,3779 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminAsyncClient -from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminClient -from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers -from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert InstanceAdminClient._get_default_mtls_endpoint(None) is None - assert InstanceAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert InstanceAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert InstanceAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - InstanceAdminClient, - InstanceAdminAsyncClient, -]) -def test_instance_admin_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'spanner.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.InstanceAdminGrpcTransport, "grpc"), - (transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_instance_admin_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - InstanceAdminClient, - InstanceAdminAsyncClient, -]) -def test_instance_admin_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'spanner.googleapis.com:443' - - -def test_instance_admin_client_get_transport_class(): - transport = InstanceAdminClient.get_transport_class() - available_transports = [ - transports.InstanceAdminGrpcTransport, - ] - assert transport in available_transports - - transport = InstanceAdminClient.get_transport_class("grpc") - assert transport == transports.InstanceAdminGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(InstanceAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminClient)) -@mock.patch.object(InstanceAdminAsyncClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(InstanceAdminAsyncClient)) -def test_instance_admin_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "true"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "false"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, 
"grpc_asyncio", "false"), -]) -@mock.patch.object(InstanceAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminClient)) -@mock.patch.object(InstanceAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_instance_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_instance_admin_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_instance_admin_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_instance_admin_client_client_options_from_dict(): - with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = InstanceAdminClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_list_instance_configs(transport: str = 'grpc', request_type=spanner_instance_admin.ListInstanceConfigsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_instance_configs_from_dict(): - test_list_instance_configs(request_type=dict) - - -def test_list_instance_configs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - client.list_instance_configs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest() - - -@pytest.mark.asyncio -async def test_list_instance_configs_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigsRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_instance_configs_async_from_dict(): - await test_list_instance_configs_async(request_type=dict) - - -def test_list_instance_configs_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.ListInstanceConfigsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_instance_configs_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = spanner_instance_admin.ListInstanceConfigsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse()) - await client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_instance_configs_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instance_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_instance_configs_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_instance_configs( - spanner_instance_admin.ListInstanceConfigsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_instance_configs_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_instance_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_instance_configs_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_instance_configs( - spanner_instance_admin.ListInstanceConfigsRequest(), - parent='parent_value', - ) - - -def test_list_instance_configs_pager(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_instance_configs(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, spanner_instance_admin.InstanceConfig) - for i in results) - -def test_list_instance_configs_pages(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instance_configs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instance_configs_async_pager(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instance_configs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_instance_admin.InstanceConfig) - for i in responses) - -@pytest.mark.asyncio -async def test_list_instance_configs_async_pages(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_instance_configs(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_instance_config(transport: str = 'grpc', request_type=spanner_instance_admin.GetInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - leader_options=['leader_options_value'], - ) - response = client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceConfigRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_instance_admin.InstanceConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.leader_options == ['leader_options_value'] - - -def test_get_instance_config_from_dict(): - test_get_instance_config(request_type=dict) - - -def test_get_instance_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - client.get_instance_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceConfigRequest() - - -@pytest.mark.asyncio -async def test_get_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - leader_options=['leader_options_value'], - )) - response = await client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceConfigRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_instance_admin.InstanceConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.leader_options == ['leader_options_value'] - - -@pytest.mark.asyncio -async def test_get_instance_config_async_from_dict(): - await test_get_instance_config_async(request_type=dict) - - -def test_get_instance_config_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstanceConfigRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - call.return_value = spanner_instance_admin.InstanceConfig() - client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_instance_config_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstanceConfigRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig()) - await client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_instance_config_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.InstanceConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_instance_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_instance_config_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance_config( - spanner_instance_admin.GetInstanceConfigRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_instance_config_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.InstanceConfig() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_instance_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_instance_config_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_instance_config( - spanner_instance_admin.GetInstanceConfigRequest(), - name='name_value', - ) - - -def test_list_instances(transport: str = 'grpc', request_type=spanner_instance_admin.ListInstancesRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_instances_from_dict(): - test_list_instances(request_type=dict) - - -def test_list_instances_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - client.list_instances() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancesRequest() - - -@pytest.mark.asyncio -async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancesRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_instances_async_from_dict(): - await test_list_instances_async(request_type=dict) - - -def test_list_instances_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = spanner_instance_admin.ListInstancesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancesResponse() - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_instances_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.ListInstancesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse()) - await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_instances_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instances( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_instances_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instances( - spanner_instance_admin.ListInstancesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_instances_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_instances( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_instances_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_instances( - spanner_instance_admin.ListInstancesRequest(), - parent='parent_value', - ) - - -def test_list_instances_pager(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_instances(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, spanner_instance_admin.Instance) - for i in results) - -def test_list_instances_pages(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, 
and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instances_async_pager(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instances(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_instance_admin.Instance) - for i in responses) - -@pytest.mark.asyncio -async def test_list_instances_async_pages(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_instances(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_instance(transport: str = 'grpc', request_type=spanner_instance_admin.GetInstanceRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.Instance( - name='name_value', - config='config_value', - display_name='display_name_value', - node_count=1070, - processing_units=1743, - state=spanner_instance_admin.Instance.State.CREATING, - endpoint_uris=['endpoint_uris_value'], - ) - response = client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_instance_admin.Instance) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.node_count == 1070 - assert response.processing_units == 1743 - assert response.state == spanner_instance_admin.Instance.State.CREATING - assert response.endpoint_uris == ['endpoint_uris_value'] - - -def test_get_instance_from_dict(): - test_get_instance(request_type=dict) - - -def test_get_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - client.get_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceRequest() - - -@pytest.mark.asyncio -async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance( - name='name_value', - config='config_value', - display_name='display_name_value', - node_count=1070, - processing_units=1743, - state=spanner_instance_admin.Instance.State.CREATING, - endpoint_uris=['endpoint_uris_value'], - )) - response = await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_instance_admin.Instance) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.node_count == 1070 - assert response.processing_units == 1743 - assert response.state == spanner_instance_admin.Instance.State.CREATING - assert response.endpoint_uris == ['endpoint_uris_value'] - - -@pytest.mark.asyncio -async def test_get_instance_async_from_dict(): - await test_get_instance_async(request_type=dict) - - -def test_get_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstanceRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = spanner_instance_admin.Instance() - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstanceRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance()) - await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.Instance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_instance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance( - spanner_instance_admin.GetInstanceRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_instance_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.Instance() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_instance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_instance_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_instance( - spanner_instance_admin.GetInstanceRequest(), - name='name_value', - ) - - -def test_create_instance(transport: str = 'grpc', request_type=spanner_instance_admin.CreateInstanceRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.CreateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_instance_from_dict(): - test_create_instance(request_type=dict) - - -def test_create_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - client.create_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.CreateInstanceRequest() - - -@pytest.mark.asyncio -async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.CreateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_async_from_dict(): - await test_create_instance_async(request_type=dict) - - -def test_create_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstanceRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstanceRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].instance_id == 'instance_id_value' - assert args[0].instance == spanner_instance_admin.Instance(name='name_value') - - -def test_create_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_instance( - spanner_instance_admin.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_instance_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].instance_id == 'instance_id_value' - assert args[0].instance == spanner_instance_admin.Instance(name='name_value') - - -@pytest.mark.asyncio -async def test_create_instance_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_instance( - spanner_instance_admin.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - -def test_update_instance(transport: str = 'grpc', request_type=spanner_instance_admin.UpdateInstanceRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.UpdateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_instance_from_dict(): - test_update_instance(request_type=dict) - - -def test_update_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - client.update_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.UpdateInstanceRequest() - - -@pytest.mark.asyncio -async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.UpdateInstanceRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_async_from_dict(): - await test_update_instance_async(request_type=dict) - - -def test_update_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceRequest() - - request.instance.name = 'instance.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance.name=instance.name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceRequest() - - request.instance.name = 'instance.name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance.name=instance.name/value', - ) in kw['metadata'] - - -def test_update_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_instance( - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].instance == spanner_instance_admin.Instance(name='name_value') - assert args[0].field_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_instance( - spanner_instance_admin.UpdateInstanceRequest(), - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_instance_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_instance( - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].instance == spanner_instance_admin.Instance(name='name_value') - assert args[0].field_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_instance_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_instance( - spanner_instance_admin.UpdateInstanceRequest(), - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_instance(transport: str = 'grpc', request_type=spanner_instance_admin.DeleteInstanceRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstanceRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_instance_from_dict(): - test_delete_instance(request_type=dict) - - -def test_delete_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - client.delete_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstanceRequest() - - -@pytest.mark.asyncio -async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstanceRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_instance_async_from_dict(): - await test_delete_instance_async(request_type=dict) - - -def test_delete_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = None - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_instance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance( - spanner_instance_admin.DeleteInstanceRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_instance_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_instance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_instance_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_instance( - spanner_instance_admin.DeleteInstanceRequest(), - name='name_value', - ) - - -def test_set_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_from_dict(): - test_set_iam_policy(request_type=dict) - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_set_iam_policy_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_get_iam_policy(transport: str = 'grpc', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_from_dict(): - test_get_iam_policy(request_type=dict) - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -def test_get_iam_policy_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions(transport: str = 'grpc', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_from_dict(): - test_test_iam_permissions(request_type=dict) - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource/value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_test_iam_permissions_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -def test_test_iam_permissions_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].resource == 'resource_value' - assert args[0].permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = InstanceAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = InstanceAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = InstanceAdminClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.InstanceAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.InstanceAdminGrpcTransport, - ) - -def test_instance_admin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.InstanceAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_instance_admin_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.InstanceAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_instance_configs', - 'get_instance_config', - 'list_instances', - 'get_instance', - 'create_instance', - 'update_instance', - 'delete_instance', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - -def test_instance_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id="octopus", - ) - - -def test_instance_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceAdminTransport() - adc.assert_called_once() - - -def test_instance_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InstanceAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, - ], -) -def test_instance_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.InstanceAdminGrpcTransport, grpc_helpers), - (transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_instance_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) -def test_instance_admin_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_instance_admin_host_no_port(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), - ) - assert client.transport._host == 'spanner.googleapis.com:443' - - -def test_instance_admin_host_with_port(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), - ) - assert client.transport._host == 'spanner.googleapis.com:8000' - -def test_instance_admin_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.InstanceAdminGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_instance_admin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.InstanceAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) -def test_instance_admin_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from 
grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) -def test_instance_admin_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_instance_admin_grpc_lro_client(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_instance_admin_grpc_lro_async_client(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_instance_path(): - project = "squid" - instance = "clam" - expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - actual = InstanceAdminClient.instance_path(project, instance) - assert expected == actual - - -def test_parse_instance_path(): - expected = { - "project": "whelk", - "instance": "octopus", - } - path = InstanceAdminClient.instance_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_instance_path(path) - assert expected == actual - -def test_instance_config_path(): - project = "oyster" - instance_config = "nudibranch" - expected = "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, ) - actual = InstanceAdminClient.instance_config_path(project, instance_config) - assert expected == actual - - -def test_parse_instance_config_path(): - expected = { - "project": "cuttlefish", - "instance_config": "mussel", - } - path = InstanceAdminClient.instance_config_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_instance_config_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = InstanceAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = InstanceAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = InstanceAdminClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = InstanceAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = InstanceAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = InstanceAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = InstanceAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = InstanceAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = InstanceAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = InstanceAdminClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = InstanceAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = InstanceAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: - transport_class = InstanceAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with 
mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() diff --git a/scripts/fixup_spanner_admin_database_v1_keywords.py b/scripts/fixup_spanner_admin_database_v1_keywords.py index 8a04d60b67..cc4c78d884 100644 --- a/scripts/fixup_spanner_admin_database_v1_keywords.py +++ b/scripts/fixup_spanner_admin_database_v1_keywords.py @@ -39,23 +39,23 @@ def partition( class spanner_admin_databaseCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ), - 'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', ), - 'delete_backup': ('name', ), - 'drop_database': ('database', ), - 'get_backup': ('name', ), - 'get_database': ('name', ), - 'get_database_ddl': ('database', ), - 'get_iam_policy': ('resource', 'options', ), - 'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_backups': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_databases': ('parent', 'page_size', 'page_token', ), - 'restore_database': ('parent', 'database_id', 'backup', 'encryption_config', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_backup': ('backup', 'update_mask', ), - 'update_database_ddl': ('database', 'statements', 'operation_id', ), + 
'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ), + 'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', ), + 'delete_backup': ('name', ), + 'drop_database': ('database', ), + 'get_backup': ('name', ), + 'get_database': ('name', ), + 'get_database_ddl': ('database', ), + 'get_iam_policy': ('resource', 'options', ), + 'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_backups': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_databases': ('parent', 'page_size', 'page_token', ), + 'restore_database': ('parent', 'database_id', 'backup', 'encryption_config', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_backup': ('backup', 'update_mask', ), + 'update_database_ddl': ('database', 'statements', 'operation_id', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -74,7 +74,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/scripts/fixup_spanner_admin_instance_v1_keywords.py b/scripts/fixup_spanner_admin_instance_v1_keywords.py index f52d1c5fe3..afbc7517bc 100644 --- a/scripts/fixup_spanner_admin_instance_v1_keywords.py +++ b/scripts/fixup_spanner_admin_instance_v1_keywords.py @@ -39,16 +39,16 @@ def partition( class spanner_admin_instanceCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_instance': ('name', 'field_mask', ), - 'get_instance_config': 
('name', ), - 'list_instance_configs': ('parent', 'page_size', 'page_token', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_instance': ('instance', 'field_mask', ), + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_instance': ('name', 'field_mask', ), + 'get_instance_config': ('name', ), + 'list_instance_configs': ('parent', 'page_size', 'page_token', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_instance': ('instance', 'field_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -67,7 +67,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/scripts/fixup_spanner_v1_keywords.py b/scripts/fixup_spanner_v1_keywords.py index bff8352aa8..fec728843e 100644 --- a/scripts/fixup_spanner_v1_keywords.py +++ b/scripts/fixup_spanner_v1_keywords.py @@ -39,21 +39,21 @@ def partition( class spannerCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_create_sessions': ('database', 'session_count', 'session_template', ), - 'begin_transaction': ('session', 'options', 'request_options', ), - 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'request_options', ), - 'create_session': ('database', 'session', ), - 'delete_session': ('name', ), - 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', 
), - 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ), - 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ), - 'get_session': ('name', ), - 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), - 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), - 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), - 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ), - 'rollback': ('session', 'transaction_id', ), - 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ), + 'batch_create_sessions': ('database', 'session_count', 'session_template', ), + 'begin_transaction': ('session', 'options', 'request_options', ), + 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'request_options', ), + 'create_session': ('database', 'session', ), + 'delete_session': ('name', ), + 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', ), + 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ), + 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', ), + 'get_session': ('name', ), + 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), + 'partition_query': ('session', 'sql', 'transaction', 'params', 
'param_types', 'partition_options', ), + 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), + 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ), + 'rollback': ('session', 'transaction_id', ), + 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -72,7 +72,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py index 1ca405899b..70a37528b5 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -32,6 +31,7 @@ from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.spanner_admin_database_v1.services.database_admin import ( @@ -42,9 +42,6 @@ ) from google.cloud.spanner_admin_database_v1.services.database_admin import pagers from google.cloud.spanner_admin_database_v1.services.database_admin import transports -from google.cloud.spanner_admin_database_v1.services.database_admin.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from 
google.cloud.spanner_admin_database_v1.types import backup from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup from google.cloud.spanner_admin_database_v1.types import common @@ -63,20 +60,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -233,7 +216,7 @@ def test_database_admin_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -250,7 +233,7 @@ def test_database_admin_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -267,7 +250,7 @@ def test_database_admin_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = 
client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -296,7 +279,7 @@ def test_database_admin_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,7 +338,7 @@ def test_database_admin_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -397,7 +380,7 @@ def test_database_admin_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -419,7 +402,7 @@ def test_database_admin_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -450,7 +433,7 @@ def test_database_admin_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -481,7 +464,7 @@ def test_database_admin_client_client_options_credentials_file( options = 
client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -4871,13 +4854,15 @@ def test_database_admin_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): transport.operations_client -@requires_google_auth_gte_1_25_0 def test_database_admin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -4901,29 +4886,6 @@ def test_database_admin_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_database_admin_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatabaseAdminTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.admin", - ), - quota_project_id="octopus", - ) - - def test_database_admin_base_transport_with_adc(): # Test the default credentials are used if credentials and 
credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -4935,7 +4897,6 @@ def test_database_admin_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_database_admin_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -4951,21 +4912,6 @@ def test_database_admin_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_database_admin_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DatabaseAdminClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.admin", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -4973,7 +4919,6 @@ def test_database_admin_auth_adc_old_google_auth(): transports.DatabaseAdminGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_database_admin_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -4990,29 +4935,6 @@ def test_database_admin_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_database_admin_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.admin", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -5511,3 +5433,49 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py index 567d56d3c6..01e48cf641 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -32,6 +31,7 @@ from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.spanner_admin_instance_v1.services.instance_admin import ( @@ -42,9 +42,6 @@ ) from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports -from google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore @@ -56,20 +53,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -226,7 +209,7 @@ def test_instance_admin_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -243,7 +226,7 @@ def test_instance_admin_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -260,7 +243,7 @@ def test_instance_admin_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -289,7 +272,7 @@ def test_instance_admin_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, 
client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -348,7 +331,7 @@ def test_instance_admin_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -390,7 +373,7 @@ def test_instance_admin_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -412,7 +395,7 @@ def test_instance_admin_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -443,7 +426,7 @@ def test_instance_admin_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -474,7 +457,7 @@ def test_instance_admin_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -3150,13 +3133,15 @@ def test_instance_admin_base_transport(): with 
pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): transport.operations_client -@requires_google_auth_gte_1_25_0 def test_instance_admin_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -3180,29 +3165,6 @@ def test_instance_admin_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_instance_admin_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceAdminTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.admin", - ), - quota_project_id="octopus", - ) - - def test_instance_admin_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -3214,7 +3176,6 @@ def test_instance_admin_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_instance_admin_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -3230,21 +3191,6 @@ def test_instance_admin_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_instance_admin_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InstanceAdminClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.admin", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -3252,7 +3198,6 @@ def test_instance_admin_auth_adc_old_google_auth(): transports.InstanceAdminGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_instance_admin_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -3269,29 +3214,6 @@ def test_instance_admin_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_instance_admin_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.admin", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -3702,3 +3624,49 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py index 86557f33e4..488e855062 100644 --- a/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/tests/unit/gapic/spanner_v1/test_spanner.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -29,15 +28,13 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.spanner_v1.services.spanner import SpannerAsyncClient from google.cloud.spanner_v1.services.spanner import SpannerClient from google.cloud.spanner_v1.services.spanner import pagers from google.cloud.spanner_v1.services.spanner import transports -from google.cloud.spanner_v1.services.spanner.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.spanner_v1.types import commit_response from google.cloud.spanner_v1.types import keys from google.cloud.spanner_v1.types import mutation @@ -53,20 +50,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -201,7 +184,7 @@ def test_spanner_client_client_options(client_class, transport_class, transport_ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -218,7 +201,7 @@ def test_spanner_client_client_options(client_class, transport_class, transport_ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,7 +218,7 @@ def test_spanner_client_client_options(client_class, transport_class, transport_ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,7 +247,7 @@ def test_spanner_client_client_options(client_class, transport_class, transport_ options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: 
patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -319,7 +302,7 @@ def test_spanner_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -361,7 +344,7 @@ def test_spanner_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -383,7 +366,7 @@ def test_spanner_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -410,7 +393,7 @@ def test_spanner_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -437,7 +420,7 @@ def test_spanner_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ 
-3270,8 +3253,10 @@ def test_spanner_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_spanner_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -3295,29 +3280,6 @@ def test_spanner_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_spanner_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SpannerTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.data", - ), - quota_project_id="octopus", - ) - - def test_spanner_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -3329,7 +3291,6 @@ def test_spanner_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_spanner_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -3345,26 +3306,10 @@ def test_spanner_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_spanner_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SpannerClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.data", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport,], ) -@requires_google_auth_gte_1_25_0 def test_spanner_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -3381,26 +3326,6 @@ def test_spanner_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport,], -) -@requires_google_auth_lt_1_25_0 -def test_spanner_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/spanner.data", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -3782,3 +3707,49 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() From ed9908dd8144afcb3f6297338fbf9aca981f391b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 30 Oct 2021 16:13:30 +0000 Subject: [PATCH 3/4] fix(deps): require google-api-core==1.28.0 --- setup.py | 2 +- testing/constraints-3.6.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 9329250ce1..b0db629873 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.26.0, <3.0.0dev", + "google-api-core[grpc] >= 1.28.0, <3.0.0dev", # NOTE: Maintainers, please do not require google-cloud-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 2eac9c8653..a37078ed8b 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.26.0 +google-api-core==1.28.0 google-cloud-core==1.4.1 grpc-google-iam-v1==0.12.3 libcst==0.2.5 From f98918050830325bccba008b36f6c58c8ed5c239 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 30 Oct 2021 23:25:50 +0000 Subject: [PATCH 4/4] fix(deps): drop packaging dependency --- setup.py | 1 - testing/constraints-3.6.txt | 1 - 2 files changed, 2 deletions(-) diff --git a/setup.py b/setup.py index b0db629873..5e48c5f031 100644 --- a/setup.py +++ b/setup.py @@ -40,7 +40,6 @@ "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", "proto-plus >= 1.11.0, != 1.19.6", "sqlparse >= 0.3.0", - "packaging >= 14.3", ] extras = { "tracing": [ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt 
index a37078ed8b..c990f7b6b5 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -14,4 +14,3 @@ sqlparse==0.3.0 opentelemetry-api==1.1.0 opentelemetry-sdk==1.1.0 opentelemetry-instrumentation==0.20b0 -packaging==14.3